Source code for lightning_fabric.plugins.precision.fsdp
# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, TYPE_CHECKING

import torch
from typing_extensions import Literal

from lightning_fabric.plugins.precision.native_amp import MixedPrecision
from lightning_fabric.utilities.imports import _TORCH_GREATER_EQUAL_1_12

if TYPE_CHECKING:
    from torch.distributed.fsdp.fully_sharded_data_parallel import MixedPrecision as TorchMixedPrecision
    from torch.distributed.fsdp.sharded_grad_scaler import ShardedGradScaler
class FSDPPrecision(MixedPrecision):
    """AMP for Fully Sharded Data Parallel training."""

    def __init__(
        self, precision: Literal["16", 16, "bf16"], device: str, scaler: Optional["ShardedGradScaler"] = None
    ) -> None:
        if not _TORCH_GREATER_EQUAL_1_12:
            raise NotImplementedError("`FSDPPrecision` is supported from PyTorch v1.12.0 onwards.")

        from torch.distributed.fsdp.sharded_grad_scaler import ShardedGradScaler

        # Default to a ``ShardedGradScaler`` for float16 training; keep a user-provided scaler if one
        # was passed (the original ``else None`` silently discarded it).
        super().__init__(
            precision=precision,
            device=device,
            scaler=(ShardedGradScaler() if scaler is None and str(precision) == "16" else scaler),
        )

    @property
    def mixed_precision_config(self) -> "TorchMixedPrecision":
        from torch.distributed.fsdp.fully_sharded_data_parallel import MixedPrecision as TorchMixedPrecision

        if self.precision == "16":
            dtype = torch.float16
        elif self.precision == "bf16":
            dtype = torch.bfloat16
        else:
            raise ValueError(f"Was unable to infer precision type, received {self.precision!r}.")
        # Use the same reduced dtype for parameters, gradient reduction, and buffers.
        return TorchMixedPrecision(
            param_dtype=dtype,
            reduce_dtype=dtype,
            buffer_dtype=dtype,
        )
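A minimal usage sketch (not part of the library source above), assuming PyTorch 1.12+ with a CUDA device; the model and the commented-out FSDP wrapping are illustrative only:

# Construct the plugin and inspect the torch.distributed.fsdp ``MixedPrecision``
# config it derives, which is what gets handed to the FSDP wrapper.
import torch

from lightning_fabric.plugins.precision.fsdp import FSDPPrecision

precision = FSDPPrecision(precision="16", device="cuda")  # creates a ShardedGradScaler internally
config = precision.mixed_precision_config
assert config.param_dtype is torch.float16  # reduce_dtype and buffer_dtype match

# In an initialized distributed setup (after torch.distributed.init_process_group),
# the config would be passed to FSDP, e.g.:
#   from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
#   model = FSDP(MyModel(), mixed_precision=config)  # MyModel is hypothetical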