Source code for lightning_fabric.plugins.precision.tpu_bf16
# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

import torch
from torch import Tensor
from typing_extensions import Literal

from lightning_fabric.plugins.precision import TPUPrecision
from lightning_fabric.plugins.precision.utils import _convert_fp_tensor
class TPUBf16Precision(TPUPrecision):
    """Plugin that enables bfloat16 on TPUs."""

    precision: Literal["bf16"] = "bf16"

    def __init__(self) -> None:
        super().__init__()
        # Instruct torch_xla to store floating-point tensors as bfloat16 on TPU.
        os.environ["XLA_USE_BF16"] = "1"

    def convert_input(self, data: Tensor) -> Tensor:
        # Cast floating-point input tensors to bfloat16 before they reach the model.
        return _convert_fp_tensor(data, torch.bfloat16)

    def teardown(self) -> None:
        # Restore the default XLA precision when the plugin is torn down.
        os.environ.pop("XLA_USE_BF16", None)
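For reference, a minimal usage sketch (not part of the module above): on a TPU runtime where torch_xla and Lightning Fabric are installed, passing precision="bf16" together with the TPU accelerator is what causes Fabric to select this plugin, which in turn sets XLA_USE_BF16=1 so XLA keeps float tensors in bfloat16. The device count below is an illustrative choice.

from lightning_fabric import Fabric

# Assumes a TPU environment with torch_xla available; Fabric picks
# TPUBf16Precision when precision="bf16" is combined with the TPU accelerator.
fabric = Fabric(accelerator="tpu", devices=8, precision="bf16")
fabric.launch()

The plugin can also be instantiated directly, which immediately sets the environment variable:

from lightning_fabric.plugins.precision.tpu_bf16 import TPUBf16Precision

plugin = TPUBf16Precision()  # sets os.environ["XLA_USE_BF16"] = "1"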