# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import logging
import os
from argparse import Namespace
from typing import Any, Dict, List, Optional, Union

from torch import Tensor

from lightning_fabric.loggers.logger import Logger, rank_zero_experiment
from lightning_fabric.utilities.logger import _add_prefix
from lightning_fabric.utilities.rank_zero import rank_zero_only, rank_zero_warn
from lightning_fabric.utilities.types import _PATH

log = logging.getLogger(__name__)
class CSVLogger(Logger):
    r"""Log to the local file system in CSV format.

    Logs are saved to ``os.path.join(root_dir, name, version)``.

    Args:
        root_dir: The root directory in which all your experiments with different names and versions will be stored.
        name: Experiment name. Defaults to ``'lightning_logs'``.
        version: Experiment version. If version is not specified the logger inspects the save directory for existing
            versions, then automatically assigns the next available version.
        prefix: A string to put at the beginning of metric keys.
        flush_logs_every_n_steps: How often to flush logs to disk (defaults to every 100 steps).

    Example::

        from lightning.fabric.loggers import CSVLogger

        logger = CSVLogger("path/to/logs/root", name="my_model")
        logger.log_metrics({"loss": 0.235, "acc": 0.75})
        logger.finalize("success")

    """

    LOGGER_JOIN_CHAR = "-"

    def __init__(
        self,
        root_dir: _PATH,
        name: str = "lightning_logs",
        version: Optional[Union[int, str]] = None,
        prefix: str = "",
        flush_logs_every_n_steps: int = 100,
    ):
        super().__init__()
        self._root_dir = os.fspath(root_dir)
        self._name = name or ""
        self._version = version
        self._prefix = prefix
        self._experiment: Optional[_ExperimentWriter] = None
        self._flush_logs_every_n_steps = flush_logs_every_n_steps

    @property
    def name(self) -> str:
        """Gets the name of the experiment.

        Returns:
            The name of the experiment.

        """
        return self._name

    @property
    def version(self) -> Union[int, str]:
        """Gets the version of the experiment.

        Returns:
            The version of the experiment if it is specified, else the next version.

        """
        if self._version is None:
            self._version = self._get_next_version()
        return self._version

    @property
    def root_dir(self) -> str:
        """Gets the save directory where the versioned CSV experiments are saved."""
        return self._root_dir

    @property
    def log_dir(self) -> str:
        """The log directory for this run.

        By default, it is named ``'version_${self.version}'`` but it can be overridden by passing a string value for
        the constructor's version parameter instead of ``None`` or an int.

        """
        # create a pseudo standard path
        version = self.version if isinstance(self.version, str) else f"version_{self.version}"
        log_dir = os.path.join(self.root_dir, self.name, version)
        return log_dir

    @property
    @rank_zero_experiment
    def experiment(self) -> "_ExperimentWriter":
        """Actual ExperimentWriter object. To use ExperimentWriter features anywhere in your code, do the following.

        Example::

            self.logger.experiment.some_experiment_writer_function()

        """
        if self._experiment is not None:
            return self._experiment

        os.makedirs(self.root_dir, exist_ok=True)
        self._experiment = _ExperimentWriter(log_dir=self.log_dir)
        return self._experiment
    @rank_zero_only
    def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
        raise NotImplementedError("The `CSVLogger` does not yet support logging hyperparameters.")
    @rank_zero_only
    def finalize(self, status: str) -> None:
        if self._experiment is None:
            # When using multiprocessing, finalize() should be a no-op on the main process, as no experiment has been
            # initialized there
            return
        self.save()
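
    # NOTE: The following methods are referenced by the code above (`self.save()` in `finalize`,
    # `self._get_next_version()` in the `version` property, plus the otherwise-unused `_add_prefix`
    # import and `_flush_logs_every_n_steps` attribute) but are missing from the extracted source.
    # They are reconstructed here as a plausible sketch under those assumptions, not verbatim library code.

    @rank_zero_only
    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
        # Prefix the metric keys, forward them to the experiment writer, and flush to disk
        # every `_flush_logs_every_n_steps` steps.
        metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
        if step is None:
            step = len(self.experiment.metrics)
        self.experiment.log_metrics(metrics, step)
        if (step + 1) % self._flush_logs_every_n_steps == 0:
            self.save()

    @rank_zero_only
    def save(self) -> None:
        super().save()
        self.experiment.save()

    def _get_next_version(self) -> int:
        # Scan the root directory for existing `version_*` folders and return the next free index.
        if not os.path.isdir(self.root_dir):
            log.warning("Missing logger folder: %s", self.root_dir)
            return 0
        existing_versions = []
        for d in os.listdir(self.root_dir):
            full_path = os.path.join(self.root_dir, d)
            if os.path.isdir(full_path) and d.startswith("version_"):
                existing_versions.append(int(d.split("_")[1]))
        if not existing_versions:
            return 0
        return max(existing_versions) + 1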
class _ExperimentWriter:
    r"""Experiment writer for CSVLogger.

    Args:
        log_dir: Directory for the experiment logs

    """

    NAME_METRICS_FILE = "metrics.csv"

    def __init__(self, log_dir: str) -> None:
        self.metrics: List[Dict[str, float]] = []
        self.log_dir = log_dir
        if os.path.exists(self.log_dir) and os.listdir(self.log_dir):
            rank_zero_warn(
                f"Experiment logs directory {self.log_dir} exists and is not empty."
                " Previous log files in this directory will be deleted when the new ones are saved!"
            )
        os.makedirs(self.log_dir, exist_ok=True)

        self.metrics_file_path = os.path.join(self.log_dir, self.NAME_METRICS_FILE)

    def log_metrics(self, metrics_dict: Dict[str, float], step: Optional[int] = None) -> None:
        """Record metrics."""

        def _handle_value(value: Union[Tensor, Any]) -> Any:
            if isinstance(value, Tensor):
                return value.item()
            return value

        if step is None:
            step = len(self.metrics)

        metrics = {k: _handle_value(v) for k, v in metrics_dict.items()}
        metrics["step"] = step
        self.metrics.append(metrics)

    def save(self) -> None:
        """Save recorded metrics into files."""
        if not self.metrics:
            return

        last_m = {}
        for m in self.metrics:
            last_m.update(m)
        metrics_keys = list(last_m.keys())

        with open(self.metrics_file_path, "w", newline="") as f:
            writer = csv.DictWriter(f, fieldnames=metrics_keys)
            writer.writeheader()
            writer.writerows(self.metrics)
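
# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# Each `log_metrics` call buffers one row in the experiment writer; `save`/`finalize` writes all
# buffered rows with a header built from the union of metric keys seen so far. The paths and
# values below are hypothetical.
if __name__ == "__main__":
    logger = CSVLogger("logs", name="demo")
    logger.log_metrics({"loss": 0.5}, step=0)
    logger.log_metrics({"loss": 0.4, "acc": 0.8}, step=1)
    logger.finalize("success")
    # For a fresh run, logs/demo/version_0/metrics.csv would then contain something like:
    #   loss,step,acc
    #   0.5,0,
    #   0.4,1,0.8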