Source code for pytorch_lightning.accelerators.tpu
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List, Optional, Union

import torch

from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities import device_parser
from pytorch_lightning.utilities.imports import _TPU_AVAILABLE, _XLA_AVAILABLE

if _XLA_AVAILABLE:
    import torch_xla.core.xla_model as xm
class TPUAccelerator(Accelerator):
    """Accelerator for TPU devices."""
    def get_device_stats(self, device: Union[str, torch.device]) -> Dict[str, Any]:
        """Gets stats for the given TPU device.

        Args:
            device: TPU device for which to get stats

        Returns:
            A dictionary mapping the metrics (free memory and peak memory) to their values.
        """
        memory_info = xm.get_memory_info(device)
        # ``xm.get_memory_info`` reports kilobytes; convert to megabytes so the
        # values match the "(MB)" keys below.
        free_memory = memory_info["kb_free"] / 1024
        peak_memory = memory_info["kb_total"] / 1024 - free_memory
        device_stats = {
            "avg. free memory (MB)": free_memory,
            "avg. peak memory (MB)": peak_memory,
        }
        return device_stats
    @staticmethod
    def get_parallel_devices(devices: Union[int, List[int]]) -> List[int]:
        """Gets parallel devices for the Accelerator."""
        if isinstance(devices, int):
            return list(range(devices))
        return devices
    @staticmethod
    def auto_device_count() -> int:
        """Get the number of devices when set to auto."""
        return 8
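
As a usage sketch (not part of the module), the snippet below shows how these hooks fit together. It assumes a single standard 8-core TPU host with ``torch_xla`` installed; the device string ``"xla:0"`` is an illustrative example.

# Hypothetical usage sketch, assuming a TPU runtime with torch_xla installed.
from pytorch_lightning.accelerators.tpu import TPUAccelerator

# "auto" resolves to 8 devices, i.e. one standard 8-core TPU host.
n_devices = TPUAccelerator.auto_device_count()

# An int is expanded to the indices [0, ..., n-1]; an explicit list passes through.
assert TPUAccelerator.get_parallel_devices(n_devices) == list(range(8))
assert TPUAccelerator.get_parallel_devices([0, 4]) == [0, 4]

# Querying memory stats requires an actual XLA device, e.g. "xla:0":
# stats = TPUAccelerator().get_device_stats("xla:0")
# print(stats)  # {"avg. free memory (MB)": ..., "avg. peak memory (MB)": ...}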