Source code for pytorch_lightning.strategies.single_tpu
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Dict, Optional

import pytorch_lightning as pl
from pytorch_lightning.plugins.io.checkpoint_plugin import CheckpointIO
from pytorch_lightning.plugins.io.wrapper import _WrappingCheckpointIO
from pytorch_lightning.plugins.io.xla_plugin import XLACheckpointIO
from pytorch_lightning.plugins.precision import PrecisionPlugin
from pytorch_lightning.strategies.single_device import SingleDeviceStrategy
from pytorch_lightning.utilities import _TPU_AVAILABLE, find_shared_parameters, set_shared_parameters

if _TPU_AVAILABLE:
    import torch_xla.core.xla_model as xm
class SingleTPUStrategy(SingleDeviceStrategy):
    """Strategy for training on a single TPU device."""

    strategy_name = "single_tpu"

    def __init__(
        self,
        device: int,
        accelerator: Optional["pl.accelerators.accelerator.Accelerator"] = None,
        checkpoint_io: Optional[CheckpointIO] = None,
        precision_plugin: Optional[PrecisionPlugin] = None,
        debug: bool = False,
    ):
        super().__init__(
            accelerator=accelerator,
            device=xm.xla_device(device),
            checkpoint_io=checkpoint_io,
            precision_plugin=precision_plugin,
        )
        self.debug = debug

    @property
    def checkpoint_io(self) -> CheckpointIO:
        if self._checkpoint_io is None:
            self._checkpoint_io = XLACheckpointIO()
        elif isinstance(self._checkpoint_io, _WrappingCheckpointIO):
            self._checkpoint_io.checkpoint_io = XLACheckpointIO()

        return self._checkpoint_io

    @checkpoint_io.setter
    def checkpoint_io(self, io: Optional[CheckpointIO]) -> None:
        self._checkpoint_io = io

    @property
    def is_distributed(self) -> bool:
        return False
    def setup(self, trainer: "pl.Trainer") -> None:
        assert self.model, "self.model must be set before find_shared_parameters(self.model)"
        shared_params = find_shared_parameters(self.model)
        self.model_to_device()
        set_shared_parameters(self.model, shared_params)
        super().setup(trainer)

        if self.debug:
            os.environ["PT_XLA_DEBUG"] = str(1)
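A minimal usage sketch follows. It assumes a TPU-enabled environment with torch_xla installed; the `ToyModel` module and its dimensions are placeholders for illustration. Requesting a single TPU device through the Trainer is the usual way this strategy gets selected, rather than instantiating `SingleTPUStrategy` directly.

    import torch
    from torch.utils.data import DataLoader, TensorDataset

    import pytorch_lightning as pl


    class ToyModel(pl.LightningModule):
        """Tiny placeholder LightningModule used only to exercise the strategy."""

        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(32, 2)

        def training_step(self, batch, batch_idx):
            x, y = batch
            return torch.nn.functional.cross_entropy(self.layer(x), y)

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.1)


    if __name__ == "__main__":
        dataset = TensorDataset(torch.randn(64, 32), torch.randint(0, 2, (64,)))
        # Asking for a single TPU device lets the accelerator connector pick a
        # single-TPU strategy; XLACheckpointIO is then used for checkpointing.
        trainer = pl.Trainer(accelerator="tpu", devices=1, max_epochs=1)
        trainer.fit(ToyModel(), DataLoader(dataset, batch_size=16))

Note that constructing the strategy yourself requires the XLA device index (the `device: int` argument above), and passing `debug=True` sets the `PT_XLA_DEBUG` environment variable during `setup`.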