Source code for pytorch_lightning.loggers.test_tube
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test Tube Logger
----------------
"""
from argparse import Namespace
from typing import Any, Dict, Optional, Union

import pytorch_lightning as pl
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available
from pytorch_lightning.utilities.logger import _add_prefix, _convert_params, _flatten_dict
from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_only, rank_zero_warn

# test-tube is an optional dependency: expose ``Experiment`` as ``None`` when it
# is missing so the logger can raise a helpful error at construction time.
_TESTTUBE_AVAILABLE = _module_available("test_tube")

if _TESTTUBE_AVAILABLE:
    from test_tube import Experiment
else:
    Experiment = None
class TestTubeLogger(LightningLoggerBase):
    r"""
    Log to local file system in `TensorBoard <https://www.tensorflow.org/tensorboard>`_ format
    but using a nicer folder structure (see `full docs <https://williamfalcon.github.io/test-tube>`_).

    Warning:
        The test-tube package is no longer maintained and PyTorch Lightning will remove the
        :class:`TestTubeLogger` in v1.7.0.

    Install it with pip:

    .. code-block:: bash

        pip install test_tube

    .. code-block:: python

        from pytorch_lightning import Trainer
        from pytorch_lightning.loggers import TestTubeLogger

        logger = TestTubeLogger("tt_logs", name="my_exp_name")
        trainer = Trainer(logger=logger)

    Use the logger anywhere in your :class:`~pytorch_lightning.core.lightning.LightningModule` as follows:

    .. code-block:: python

        from pytorch_lightning import LightningModule


        class LitModel(LightningModule):
            def training_step(self, batch, batch_idx):
                # example
                self.logger.experiment.whatever_method_summary_writer_supports(...)

            def any_lightning_module_function_or_hook(self):
                self.logger.experiment.add_histogram(...)

    Args:
        save_dir: Save directory
        name: Experiment name. Defaults to ``'default'``.
        description: A short snippet about this experiment
        debug: If ``True``, it doesn't log anything.
        version: Experiment version. If version is not specified the logger inspects the save
            directory for existing versions, then automatically assigns the next available version.
        create_git_tag: If ``True`` creates a git tag to save the code used in this experiment.
        log_graph: Adds the computational graph to tensorboard. This requires that
            the user has defined the `self.example_input_array` attribute in their model.
        prefix: A string to put at the beginning of metric keys.

    Raises:
        ModuleNotFoundError:
            If required TestTube package is not installed on the device.
    """

    # Tell pytest not to collect this class despite the "Test" prefix.
    __test__ = False
    # Separator placed between ``prefix`` and each metric key.
    LOGGER_JOIN_CHAR = "-"

    def __init__(
        self,
        save_dir: str,
        name: str = "default",
        description: Optional[str] = None,
        debug: bool = False,
        version: Optional[int] = None,
        create_git_tag: bool = False,
        log_graph: bool = False,
        prefix: str = "",
    ):
        rank_zero_deprecation(
            "The TestTubeLogger is deprecated since v1.5 and will be removed in v1.7. We recommend switching to the"
            " `pytorch_lightning.loggers.TensorBoardLogger` as an alternative."
        )
        if Experiment is None:
            raise ModuleNotFoundError(
                "You want to use `test_tube` logger which is not installed yet,"
                " install it with `pip install test-tube`."
            )
        super().__init__()
        self._save_dir = save_dir
        self._name = name
        self.description = description
        self.debug = debug
        self._version = version
        self.create_git_tag = create_git_tag
        self._log_graph = log_graph
        self._prefix = prefix
        # Created lazily on first access of ``experiment``.
        self._experiment = None

    @property
    @rank_zero_experiment
    def experiment(self) -> Experiment:
        r"""
        Actual TestTube object. To use TestTube features in your
        :class:`~pytorch_lightning.core.lightning.LightningModule` do the following.

        Example::

            self.logger.experiment.some_test_tube_function()

        """
        if self._experiment is not None:
            return self._experiment

        # Lazily create the underlying test-tube Experiment on first use.
        self._experiment = Experiment(
            save_dir=self.save_dir,
            name=self._name,
            debug=self.debug,
            version=self.version,
            description=self.description,
            create_git_tag=self.create_git_tag,
            rank=rank_zero_only.rank,
        )
        return self._experiment

    @rank_zero_only
    def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
        # TODO: HACK figure out where this is being set to true
        self.experiment.debug = self.debug
        params = _convert_params(params)
        params = _flatten_dict(params)
        self.experiment.argparse(Namespace(**params))

    @rank_zero_only
    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
        # TODO: HACK figure out where this is being set to true
        metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
        self.experiment.debug = self.debug
        self.experiment.log(metrics, global_step=step)

    @rank_zero_only
    def log_graph(self, model: "pl.LightningModule", input_array=None):
        # Only log the graph when explicitly enabled and a sample input is available.
        if self._log_graph:
            if input_array is None:
                input_array = model.example_input_array

            if input_array is not None:
                self.experiment.add_graph(model, model._apply_batch_transfer_handler(input_array))
            else:
                rank_zero_warn(
                    "Could not log computational graph since neither the"
                    " `model.example_input_array` attribute is set nor"
                    " `input_array` was given",
                )

    @rank_zero_only
    def save(self) -> None:
        super().save()
        # TODO: HACK figure out where this is being set to true
        self.experiment.debug = self.debug
        self.experiment.save()

    @rank_zero_only
    def finalize(self, status: str) -> None:
        super().finalize(status)
        # TODO: HACK figure out where this is being set to true
        self.experiment.debug = self.debug
        self.save()
        self.close()

    @rank_zero_only
    def close(self) -> None:
        super().save()
        # TODO: HACK figure out where this is being set to true
        self.experiment.debug = self.debug
        if not self.debug:
            exp = self.experiment
            exp.close()

    @property
    def save_dir(self) -> Optional[str]:
        """Gets the save directory.

        Returns:
            The path to the save directory.
        """
        return self._save_dir

    @property
    def name(self) -> str:
        """Gets the experiment name.

        Returns:
            The experiment name if the experiment exists, else the name specified in the constructor.
        """
        if self._experiment is None:
            return self._name

        return self.experiment.name

    @property
    def version(self) -> int:
        """Gets the experiment version.

        Returns:
            The experiment version if the experiment exists, else the next version.
        """
        if self._experiment is None:
            return self._version

        return self.experiment.version

    # Test tube experiments are not pickleable, so we need to override a few
    # methods to get DDP working. See
    # https://docs.python.org/3/library/pickle.html#handling-stateful-objects
    # for more info.

    def __getstate__(self) -> Dict[Any, Any]:
        state = self.__dict__.copy()
        # Replace the live experiment with a picklable meta copy.
        state["_experiment"] = self.experiment.get_meta_copy()
        return state

    def __setstate__(self, state: Dict[Any, Any]):
        # Rebuild a usable (non-DDP) experiment from the pickled meta copy.
        self._experiment = state["_experiment"].get_non_ddp_exp()
        del state["_experiment"]
        self.__dict__.update(state)
To analyze traffic and optimize your experience, we serve cookies on this site. By clicking or navigating, you agree to allow our usage of cookies. Read PyTorch Lightning's Privacy Policy.