Source code for pytorch_lightning.plugins.precision.hpu
# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import cast, Optional, Union

from typing_extensions import get_args, Literal

from pytorch_lightning.accelerators.hpu import _HPU_AVAILABLE
from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin
from pytorch_lightning.utilities.exceptions import MisconfigurationException

if _HPU_AVAILABLE:
    from habana_frameworks.torch.hpex import hmp

_PRECISION_INPUT_INT = Literal[32, 16]
_PRECISION_INPUT_STR = Literal["32", "16", "bf16"]
_PRECISION_INPUT = Union[_PRECISION_INPUT_INT, _PRECISION_INPUT_STR]
class HPUPrecisionPlugin(PrecisionPlugin):
    """Plugin that enables bfloat/half support on HPUs.

    Args:
        precision: The precision to use.
        opt_level: Choose optimization level for hmp.
        bf16_file_path: Path to bf16 ops list in hmp O1 mode.
        fp32_file_path: Path to fp32 ops list in hmp O1 mode.
        verbose: Enable verbose mode for hmp.
    """

    def __init__(
        self,
        precision: _PRECISION_INPUT,
        opt_level: str = "O2",
        bf16_file_path: Optional[str] = None,
        fp32_file_path: Optional[str] = None,
        verbose: bool = False,
    ) -> None:
        if not _HPU_AVAILABLE:
            raise MisconfigurationException("HPU precision plugin requires HPU devices.")
        supported_precision = get_args(_PRECISION_INPUT_STR) + get_args(_PRECISION_INPUT_INT)
        if precision not in supported_precision:
            raise ValueError(
                f"`Trainer(accelerator='hpu', precision={precision!r})` is not supported."
                f" `precision` must be one of: {supported_precision}."
            )
        self.precision = cast(_PRECISION_INPUT_STR, str(precision))
        if self.precision in ("16", "bf16"):
            hmp.convert(
                opt_level=opt_level, bf16_file_path=bf16_file_path, fp32_file_path=fp32_file_path, isVerbose=verbose
            )
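A minimal usage sketch, not a definitive recipe: it assumes a machine with Habana HPUs and the habana_frameworks package installed, and `MyLightningModule` is a hypothetical LightningModule defined elsewhere. Because the validation above flattens both Literal aliases with get_args, integer and string forms (e.g. 16 or "bf16") are accepted for `precision`.

    # Hedged usage sketch: pass the plugin to a Trainer alongside the HPU
    # accelerator. `MyLightningModule` is a placeholder, not part of the
    # library; requires HPU hardware with habana_frameworks installed.
    from pytorch_lightning import Trainer
    from pytorch_lightning.plugins import HPUPrecisionPlugin

    # bf16 mixed precision using hmp's default "O2" op lists; passing
    # precision=16 instead would select half precision.
    trainer = Trainer(
        accelerator="hpu",
        devices=1,
        plugins=[HPUPrecisionPlugin(precision="bf16")],
    )
    trainer.fit(MyLightningModule())

For finer control, hmp's "O1" mode reads explicit op lists from the `bf16_file_path` and `fp32_file_path` arguments instead of the built-in defaults.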