# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from functools import partial, update_wrapper
from types import MethodType
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union

import torch
from lightning_utilities.core.imports import RequirementCache
from lightning_utilities.core.rank_zero import _warn
from torch.optim import Optimizer

import pytorch_lightning as pl
from lightning_fabric.utilities.cloud_io import get_filesystem
from lightning_fabric.utilities.types import _TORCH_LRSCHEDULER
from pytorch_lightning import Callback, LightningDataModule, LightningModule, seed_everything, Trainer
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.model_helpers import is_overridden
from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn

_JSONARGPARSE_SIGNATURES_AVAILABLE = RequirementCache("jsonargparse[signatures]>=4.17.0")

if _JSONARGPARSE_SIGNATURES_AVAILABLE:
    import docstring_parser
    from jsonargparse import (
        ActionConfigFile,
        ArgumentParser,
        class_from_function,
        Namespace,
        register_unresolvable_import_paths,
        set_config_read_mode,
    )

    register_unresolvable_import_paths(torch)  # Required until fix https://github.com/pytorch/pytorch/issues/74483
    set_config_read_mode(fsspec_enabled=True)
else:
    locals()["ArgumentParser"] = object
    locals()["Namespace"] = object


class ReduceLROnPlateau(torch.optim.lr_scheduler.ReduceLROnPlateau):
    def __init__(self, optimizer: Optimizer, monitor: str, *args: Any, **kwargs: Any) -> None:
        super().__init__(optimizer, *args, **kwargs)
        self.monitor = monitor


# LightningCLI requires the ReduceLROnPlateau defined here, thus it shouldn't accept the one from pytorch:
LRSchedulerTypeTuple = (_TORCH_LRSCHEDULER, ReduceLROnPlateau)
LRSchedulerTypeUnion = Union[_TORCH_LRSCHEDULER, ReduceLROnPlateau]
LRSchedulerType = Union[Type[_TORCH_LRSCHEDULER], Type[ReduceLROnPlateau]]

# Type aliases intended for convenience of CLI developers
ArgsType = Optional[Union[List[str], Dict[str, Any], Namespace]]
OptimizerCallable = Callable[[Iterable], Optimizer]
LRSchedulerCallable = Callable[[Optimizer], Union[_TORCH_LRSCHEDULER, ReduceLROnPlateau]]
class LightningArgumentParser(ArgumentParser):
    """Extension of jsonargparse's ArgumentParser for pytorch-lightning."""

    def __init__(
        self,
        *args: Any,
        description: str = "pytorch-lightning trainer command line tool",
        env_prefix: str = "PL",
        default_env: bool = False,
        **kwargs: Any,
    ) -> None:
        """Initialize argument parser that supports configuration file input.

        For full details of accepted arguments see `ArgumentParser.__init__
        <https://jsonargparse.readthedocs.io/en/stable/#jsonargparse.ArgumentParser.__init__>`_.

        Args:
            description: Description of the tool shown when running ``--help``.
            env_prefix: Prefix for environment variables. Set ``default_env=True`` to enable env parsing.
            default_env: Whether to parse environment variables.
        """
        if not _JSONARGPARSE_SIGNATURES_AVAILABLE:
            raise ModuleNotFoundError(
                f"{_JSONARGPARSE_SIGNATURES_AVAILABLE}. Try `pip install -U 'jsonargparse[signatures]'`."
            )
        super().__init__(*args, description=description, env_prefix=env_prefix, default_env=default_env, **kwargs)
        self.callback_keys: List[str] = []
        # separate optimizers and lr schedulers to know which were added
        self._optimizers: Dict[str, Tuple[Union[Type, Tuple[Type, ...]], str]] = {}
        self._lr_schedulers: Dict[str, Tuple[Union[Type, Tuple[Type, ...]], str]] = {}
    def add_lightning_class_args(
        self,
        lightning_class: Union[
            Callable[..., Union[Trainer, LightningModule, LightningDataModule, Callback]],
            Type[Trainer],
            Type[LightningModule],
            Type[LightningDataModule],
            Type[Callback],
        ],
        nested_key: str,
        subclass_mode: bool = False,
        required: bool = True,
    ) -> List[str]:
        """Adds arguments from a lightning class to a nested key of the parser.

        Args:
            lightning_class: A callable or any subclass of {Trainer, LightningModule, LightningDataModule, Callback}.
            nested_key: Name of the nested namespace to store arguments.
            subclass_mode: Whether to allow any subclass of the given class.
            required: Whether the argument group is required.

        Returns:
            A list with the names of the class arguments added.
        """
        if callable(lightning_class) and not isinstance(lightning_class, type):
            lightning_class = class_from_function(lightning_class)

        if isinstance(lightning_class, type) and issubclass(
            lightning_class, (Trainer, LightningModule, LightningDataModule, Callback)
        ):
            if issubclass(lightning_class, Callback):
                self.callback_keys.append(nested_key)
            if subclass_mode:
                return self.add_subclass_arguments(lightning_class, nested_key, fail_untyped=False, required=required)
            return self.add_class_arguments(
                lightning_class,
                nested_key,
                fail_untyped=False,
                instantiate=not issubclass(lightning_class, Trainer),
                sub_configs=True,
            )
        raise MisconfigurationException(
            f"Cannot add arguments from: {lightning_class}. You should provide either a callable or a subclass of: "
            "Trainer, LightningModule, LightningDataModule, or Callback."
        )
    def add_optimizer_args(
        self,
        optimizer_class: Union[Type[Optimizer], Tuple[Type[Optimizer], ...]] = (Optimizer,),
        nested_key: str = "optimizer",
        link_to: str = "AUTOMATIC",
    ) -> None:
        """Adds arguments from an optimizer class to a nested key of the parser.

        Args:
            optimizer_class: Any subclass of :class:`torch.optim.Optimizer`. Use tuple to allow subclasses.
            nested_key: Name of the nested namespace to store arguments.
            link_to: Dot notation of a parser key to set arguments or AUTOMATIC.
        """
        if isinstance(optimizer_class, tuple):
            assert all(issubclass(o, Optimizer) for o in optimizer_class)
        else:
            assert issubclass(optimizer_class, Optimizer)
        kwargs: Dict[str, Any] = {"instantiate": False, "fail_untyped": False, "skip": {"params"}}
        if isinstance(optimizer_class, tuple):
            self.add_subclass_arguments(optimizer_class, nested_key, **kwargs)
        else:
            self.add_class_arguments(optimizer_class, nested_key, sub_configs=True, **kwargs)
        self._optimizers[nested_key] = (optimizer_class, link_to)
    def add_lr_scheduler_args(
        self,
        lr_scheduler_class: Union[LRSchedulerType, Tuple[LRSchedulerType, ...]] = LRSchedulerTypeTuple,
        nested_key: str = "lr_scheduler",
        link_to: str = "AUTOMATIC",
    ) -> None:
        """Adds arguments from a learning rate scheduler class to a nested key of the parser.

        Args:
            lr_scheduler_class: Any subclass of ``torch.optim.lr_scheduler.{_LRScheduler, ReduceLROnPlateau}``.
                Use tuple to allow subclasses.
            nested_key: Name of the nested namespace to store arguments.
            link_to: Dot notation of a parser key to set arguments or AUTOMATIC.
        """
        if isinstance(lr_scheduler_class, tuple):
            assert all(issubclass(o, LRSchedulerTypeTuple) for o in lr_scheduler_class)
        else:
            assert issubclass(lr_scheduler_class, LRSchedulerTypeTuple)
        kwargs: Dict[str, Any] = {"instantiate": False, "fail_untyped": False, "skip": {"optimizer"}}
        if isinstance(lr_scheduler_class, tuple):
            self.add_subclass_arguments(lr_scheduler_class, nested_key, **kwargs)
        else:
            self.add_class_arguments(lr_scheduler_class, nested_key, sub_configs=True, **kwargs)
        self._lr_schedulers[nested_key] = (lr_scheduler_class, link_to)
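# Illustrative sketch (not part of this module): a project-specific CLI would typically call the two methods
# above from ``LightningCLI.add_arguments_to_parser``. The subclass name and the concrete optimizer/scheduler
# classes below are assumptions for the example, not fixed choices:
#
#     class MyLightningCLI(LightningCLI):
#         def add_arguments_to_parser(self, parser: LightningArgumentParser) -> None:
#             parser.add_optimizer_args(torch.optim.Adam)
#             parser.add_lr_scheduler_args(torch.optim.lr_scheduler.ExponentialLR)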
class SaveConfigCallback(Callback):
    """Saves a LightningCLI config to the log_dir when training starts.

    Args:
        parser: The parser object used to parse the configuration.
        config: The parsed configuration that will be saved.
        config_filename: Filename for the config file.
        overwrite: Whether to overwrite an existing config file.
        multifile: When input is multiple config files, saved config preserves this structure.

    Raises:
        RuntimeError: If the config file already exists in the directory to avoid overwriting a previous run
    """

    def __init__(
        self,
        parser: LightningArgumentParser,
        config: Namespace,
        config_filename: str = "config.yaml",
        overwrite: bool = False,
        multifile: bool = False,
    ) -> None:
        self.parser = parser
        self.config = config
        self.config_filename = config_filename
        self.overwrite = overwrite
        self.multifile = multifile
        self.already_saved = False
    def setup(self, trainer: Trainer, pl_module: LightningModule, stage: str) -> None:
        if self.already_saved:
            return

        log_dir = trainer.log_dir  # this broadcasts the directory
        assert log_dir is not None
        config_path = os.path.join(log_dir, self.config_filename)
        fs = get_filesystem(log_dir)

        if not self.overwrite:
            # check if the file exists on rank 0
            file_exists = fs.isfile(config_path) if trainer.is_global_zero else False
            # broadcast whether to fail to all ranks
            file_exists = trainer.strategy.broadcast(file_exists)
            if file_exists:
                raise RuntimeError(
                    f"{self.__class__.__name__} expected {config_path} to NOT exist. Aborting to avoid overwriting"
                    " results of a previous run. You can delete the previous config file,"
                    " set `LightningCLI(save_config_callback=None)` to disable config saving,"
                    ' or set `LightningCLI(save_config_kwargs={"overwrite": True})` to overwrite the config file.'
                )

        # save the file on rank 0
        if trainer.is_global_zero:
            # save only on rank zero to avoid race conditions.
            # the `log_dir` needs to be created as we rely on the logger to do it usually
            # but it hasn't logged anything at this point
            fs.makedirs(log_dir, exist_ok=True)
            self.parser.save(
                self.config, config_path, skip_none=False, overwrite=self.overwrite, multifile=self.multifile
            )
            self.already_saved = True

        # broadcast so that all ranks are in sync on future calls to .setup()
        self.already_saved = trainer.strategy.broadcast(self.already_saved)
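# Illustrative sketch (not part of this module): as the error message above suggests, this callback is usually
# tuned through ``save_config_kwargs`` rather than by instantiating it directly, e.g. to allow overwriting the
# config saved by a previous run (``MyModel`` is a placeholder model class for the example):
#
#     cli = LightningCLI(MyModel, save_config_kwargs={"overwrite": True})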
class LightningCLI:
    """Implementation of a configurable command line tool for pytorch-lightning."""

    def __init__(
        self,
        model_class: Optional[Union[Type[LightningModule], Callable[..., LightningModule]]] = None,
        datamodule_class: Optional[Union[Type[LightningDataModule], Callable[..., LightningDataModule]]] = None,
        save_config_callback: Optional[Type[SaveConfigCallback]] = SaveConfigCallback,
        save_config_kwargs: Optional[Dict[str, Any]] = None,
        trainer_class: Union[Type[Trainer], Callable[..., Trainer]] = Trainer,
        trainer_defaults: Optional[Dict[str, Any]] = None,
        seed_everything_default: Union[bool, int] = True,
        parser_kwargs: Optional[Union[Dict[str, Any], Dict[str, Dict[str, Any]]]] = None,
        subclass_mode_model: bool = False,
        subclass_mode_data: bool = False,
        args: ArgsType = None,
        run: bool = True,
        auto_configure_optimizers: bool = True,
        **kwargs: Any,  # Remove with deprecations of v2.0.0
    ) -> None:
        """Receives as input pytorch-lightning classes (or callables which return pytorch-lightning classes), which
        are called / instantiated using a parsed configuration file and / or command line args.

        Parsing of configuration from environment variables can be enabled by setting
        ``parser_kwargs={"default_env": True}``. A full configuration yaml would be parsed from ``PL_CONFIG`` if set.
        Individual settings can also be parsed from variables named, for example, ``PL_TRAINER__MAX_EPOCHS``.

        For more info, read :ref:`the CLI docs <lightning-cli>`.

        .. warning:: ``LightningCLI`` is in beta and subject to change.

        Args:
            model_class: An optional :class:`~pytorch_lightning.core.module.LightningModule` class to train on or a
                callable which returns a :class:`~pytorch_lightning.core.module.LightningModule` instance when
                called. If ``None``, you can pass a registered model with ``--model=MyModel``.
            datamodule_class: An optional :class:`~pytorch_lightning.core.datamodule.LightningDataModule` class or a
                callable which returns a :class:`~pytorch_lightning.core.datamodule.LightningDataModule` instance
                when called. If ``None``, you can pass a registered datamodule with ``--data=MyDataModule``.
            save_config_callback: A callback class to save the config.
            save_config_kwargs: Parameters that will be used to instantiate the save_config_callback.
            trainer_class: An optional subclass of the :class:`~pytorch_lightning.trainer.trainer.Trainer` class or a
                callable which returns a :class:`~pytorch_lightning.trainer.trainer.Trainer` instance when called.
            trainer_defaults: Set to override Trainer defaults or add persistent callbacks. The callbacks added
                through this argument will not be configurable from a configuration file and will always be present
                for this particular CLI. Alternatively, configurable callbacks can be added as explained in
                :ref:`the CLI docs <lightning-cli>`.
            seed_everything_default: Number for the :func:`~lightning_fabric.utilities.seed.seed_everything`
                seed value. Set to True to automatically choose a seed value. Setting it to False will avoid calling
                ``seed_everything``.
            parser_kwargs: Additional arguments to instantiate each ``LightningArgumentParser``.
            subclass_mode_model: Whether model can be any `subclass
                <https://jsonargparse.readthedocs.io/en/stable/#class-type-and-sub-classes>`_ of the given class.
            subclass_mode_data: Whether datamodule can be any `subclass
                <https://jsonargparse.readthedocs.io/en/stable/#class-type-and-sub-classes>`_ of the given class.
            args: Arguments to parse. If ``None`` the arguments are taken from ``sys.argv``. Command line style
                arguments can be given in a ``list``.
                Alternatively, structured config options can be given in a ``dict`` or ``jsonargparse.Namespace``.
            run: Whether subcommands should be added to run a :class:`~pytorch_lightning.trainer.trainer.Trainer`
                method. If set to ``False``, the trainer and model classes will be instantiated only.
        """
        self.save_config_callback = save_config_callback
        self.save_config_kwargs = save_config_kwargs or {}
        self.trainer_class = trainer_class
        self.trainer_defaults = trainer_defaults or {}
        self.seed_everything_default = seed_everything_default
        self.parser_kwargs = parser_kwargs or {}  # type: ignore[var-annotated]  # github.com/python/mypy/issues/6463
        self.auto_configure_optimizers = auto_configure_optimizers
        self._handle_deprecated_params(kwargs)

        self.model_class = model_class
        # used to differentiate between the original value and the processed value
        self._model_class = model_class or LightningModule
        self.subclass_mode_model = (model_class is None) or subclass_mode_model

        self.datamodule_class = datamodule_class
        # used to differentiate between the original value and the processed value
        self._datamodule_class = datamodule_class or LightningDataModule
        self.subclass_mode_data = (datamodule_class is None) or subclass_mode_data

        main_kwargs, subparser_kwargs = self._setup_parser_kwargs(self.parser_kwargs)
        self.setup_parser(run, main_kwargs, subparser_kwargs)
        self.parse_arguments(self.parser, args)

        self.subcommand = self.config["subcommand"] if run else None

        self._set_seed()

        self.before_instantiate_classes()
        self.instantiate_classes()

        if self.subcommand is not None:
            self._run_subcommand(self.subcommand)

    def _handle_deprecated_params(self, kwargs: dict) -> None:
        for name in kwargs.keys() & ["save_config_filename", "save_config_overwrite", "save_config_multifile"]:
            value = kwargs.pop(name)
            key = name.replace("save_config_", "").replace("filename", "config_filename")
            self.save_config_kwargs[key] = value
            rank_zero_deprecation(
                f"LightningCLI's {name!r} init parameter is deprecated from v1.8 and will "
                f"be removed in v2.0.0. Use `save_config_kwargs={{'{key}': ...}}` instead."
            )

        for name in kwargs.keys() & ["description", "env_prefix", "env_parse"]:
            value = kwargs.pop(name)
            key = name.replace("env_parse", "default_env")
            self.parser_kwargs[key] = value
            rank_zero_deprecation(
                f"LightningCLI's {name!r} init parameter is deprecated from v1.9 and will "
                f"be removed in v2.0. Use `parser_kwargs={{'{key}': ...}}` instead."
            )

        if kwargs:
            raise ValueError(f"Unexpected keyword parameters: {kwargs}")

    def _setup_parser_kwargs(self, parser_kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        subcommand_names = self.subcommands().keys()
        main_kwargs = {k: v for k, v in parser_kwargs.items() if k not in subcommand_names}
        subparser_kwargs = {k: v for k, v in parser_kwargs.items() if k in subcommand_names}
        return main_kwargs, subparser_kwargs
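    # Illustrative sketch (not part of this module): the constructor above is typically the entire entry point
    # of a training script, with the subcommand (fit/validate/...) supplied on the command line. ``MyModel`` and
    # ``MyDataModule`` are placeholder names for the example:
    #
    #     def cli_main() -> None:
    #         LightningCLI(MyModel, MyDataModule)
    #
    #     if __name__ == "__main__":
    #         cli_main()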
    def init_parser(self, **kwargs: Any) -> LightningArgumentParser:
        """Method that instantiates the argument parser."""
        kwargs.setdefault("dump_header", [f"pytorch_lightning=={pl.__version__}"])
        parser = LightningArgumentParser(**kwargs)
        parser.add_argument(
            "-c", "--config", action=ActionConfigFile, help="Path to a configuration file in json or yaml format."
        )
        return parser
    def setup_parser(
        self, add_subcommands: bool, main_kwargs: Dict[str, Any], subparser_kwargs: Dict[str, Any]
    ) -> None:
        """Initialize and setup the parser, subcommands, and arguments."""
        self.parser = self.init_parser(**main_kwargs)
        if add_subcommands:
            self._subcommand_method_arguments: Dict[str, List[str]] = {}
            self._add_subcommands(self.parser, **subparser_kwargs)
        else:
            self._add_arguments(self.parser)
    def add_default_arguments_to_parser(self, parser: LightningArgumentParser) -> None:
        """Adds default arguments to the parser."""
        parser.add_argument(
            "--seed_everything",
            type=Union[bool, int],
            default=self.seed_everything_default,
            help=(
                "Set to an int to run seed_everything with this value before classes instantiation. "
                "Set to True to use a random seed."
            ),
        )
    def add_core_arguments_to_parser(self, parser: LightningArgumentParser) -> None:
        """Adds arguments from the core classes to the parser."""
        parser.add_lightning_class_args(self.trainer_class, "trainer")
        trainer_defaults = {"trainer." + k: v for k, v in self.trainer_defaults.items() if k != "callbacks"}
        parser.set_defaults(trainer_defaults)

        parser.add_lightning_class_args(self._model_class, "model", subclass_mode=self.subclass_mode_model)

        if self.datamodule_class is not None:
            parser.add_lightning_class_args(self._datamodule_class, "data", subclass_mode=self.subclass_mode_data)
        else:
            # this should not be required because the user might want to use the `LightningModule` dataloaders
            parser.add_lightning_class_args(
                self._datamodule_class, "data", subclass_mode=self.subclass_mode_data, required=False
            )
    def _add_arguments(self, parser: LightningArgumentParser) -> None:
        # default + core + custom arguments
        self.add_default_arguments_to_parser(parser)
        self.add_core_arguments_to_parser(parser)
        self.add_arguments_to_parser(parser)
        # add default optimizer args if necessary
        if self.auto_configure_optimizers:
            if not parser._optimizers:  # already added by the user in `add_arguments_to_parser`
                parser.add_optimizer_args((Optimizer,))
            if not parser._lr_schedulers:  # already added by the user in `add_arguments_to_parser`
                parser.add_lr_scheduler_args(LRSchedulerTypeTuple)
        self.link_optimizers_and_lr_schedulers(parser)
    def add_arguments_to_parser(self, parser: LightningArgumentParser) -> None:
        """Implement to add extra arguments to the parser or link arguments.

        Args:
            parser: The parser object to which arguments can be added
        """
    @staticmethod
    def subcommands() -> Dict[str, Set[str]]:
        """Defines the list of available subcommands and the arguments to skip."""
        return {
            "fit": {"model", "train_dataloaders", "val_dataloaders", "datamodule"},
            "validate": {"model", "dataloaders", "datamodule"},
            "test": {"model", "dataloaders", "datamodule"},
            "predict": {"model", "dataloaders", "datamodule"},
            "tune": {"model", "train_dataloaders", "val_dataloaders", "datamodule"},
        }
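    # Illustrative sketch (not part of this module): each key returned above becomes a subcommand of the CLI, so
    # a script built on ``LightningCLI`` is typically invoked like the following (``main.py`` is a placeholder
    # script name for the example):
    #
    #     python main.py fit --trainer.max_epochs=10
    #     python main.py test --config config.yaml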
    def _add_subcommands(self, parser: LightningArgumentParser, **kwargs: Any) -> None:
        """Adds subcommands to the input parser."""
        self._subcommand_parsers: Dict[str, LightningArgumentParser] = {}
        parser_subcommands = parser.add_subcommands()
        # the user might have passed a builder function
        trainer_class = (
            self.trainer_class if isinstance(self.trainer_class, type) else class_from_function(self.trainer_class)
        )

        # register all subcommands in separate subcommand parsers under the main parser
        for subcommand in self.subcommands():
            fn = getattr(trainer_class, subcommand)
            # extract the first line description in the docstring for the subcommand help message
            description = _get_short_description(fn)
            subparser_kwargs = kwargs.get(subcommand, {})
            subparser_kwargs.setdefault("description", description)
            subcommand_parser = self._prepare_subcommand_parser(trainer_class, subcommand, **subparser_kwargs)
            self._subcommand_parsers[subcommand] = subcommand_parser
            parser_subcommands.add_subcommand(subcommand, subcommand_parser, help=description)

    def _prepare_subcommand_parser(self, klass: Type, subcommand: str, **kwargs: Any) -> LightningArgumentParser:
        parser = self.init_parser(**kwargs)
        self._add_arguments(parser)
        # subcommand arguments
        skip: Set[Union[str, int]] = set(self.subcommands()[subcommand])
        added = parser.add_method_arguments(klass, subcommand, skip=skip)
        # need to save which arguments were added to pass them to the method later
        self._subcommand_method_arguments[subcommand] = added
        return parser
    @staticmethod
    def link_optimizers_and_lr_schedulers(parser: LightningArgumentParser) -> None:
        """Creates argument links for optimizers and learning rate schedulers that specified a ``link_to``."""
        optimizers_and_lr_schedulers = {**parser._optimizers, **parser._lr_schedulers}
        for key, (class_type, link_to) in optimizers_and_lr_schedulers.items():
            if link_to == "AUTOMATIC":
                continue
            if isinstance(class_type, tuple):
                parser.link_arguments(key, link_to)
            else:
                add_class_path = _add_class_path_generator(class_type)
                parser.link_arguments(key, link_to, compute_fn=add_class_path)
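    # Illustrative sketch (not part of this module): a ``link_to`` target other than "AUTOMATIC" forwards the
    # parsed optimizer or scheduler config to another parser key, for instance an init parameter of the model.
    # The subclass name and the ``optimizer_init`` parameter are assumptions for the example:
    #
    #     class MyLightningCLI(LightningCLI):
    #         def add_arguments_to_parser(self, parser: LightningArgumentParser) -> None:
    #             parser.add_optimizer_args(torch.optim.Adam, link_to="model.optimizer_init")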
    def parse_arguments(self, parser: LightningArgumentParser, args: ArgsType) -> None:
        """Parses command line arguments and stores the result in ``self.config``."""
        if args is not None and len(sys.argv) > 1:
            rank_zero_warn(
                "LightningCLI's args parameter is intended to run from within Python like if it were from the command "
                "line. To prevent mistakes it is not recommended to provide both args and command line arguments, got: "
                f"sys.argv[1:]={sys.argv[1:]}, args={args}."
            )
        if isinstance(args, (dict, Namespace)):
            self.config = parser.parse_object(args)
        else:
            self.config = parser.parse_args(args)
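    # Illustrative sketch (not part of this module): the ``args`` parameter handled above allows driving the CLI
    # from within Python instead of from the command line, e.g. in tests. ``MyModel`` is a placeholder:
    #
    #     cli = LightningCLI(MyModel, args=["fit", "--trainer.fast_dev_run=True"])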
    def before_instantiate_classes(self) -> None:
        """Implement to run some code before instantiating the classes."""
    def instantiate_classes(self) -> None:
        """Instantiates the classes and sets their attributes."""
        self.config_init = self.parser.instantiate_classes(self.config)
        self.datamodule = self._get(self.config_init, "data")
        self.model = self._get(self.config_init, "model")
        self._add_configure_optimizers_method_to_model(self.subcommand)
        self.trainer = self.instantiate_trainer()
    def instantiate_trainer(self, **kwargs: Any) -> Trainer:
        """Instantiates the trainer.

        Args:
            kwargs: Any custom trainer arguments.
        """
        extra_callbacks = [self._get(self.config_init, c) for c in self._parser(self.subcommand).callback_keys]
        trainer_config = {**self._get(self.config_init, "trainer", default={}), **kwargs}
        return self._instantiate_trainer(trainer_config, extra_callbacks)
    def _instantiate_trainer(self, config: Dict[str, Any], callbacks: List[Callback]) -> Trainer:
        key = "callbacks"
        if key in config:
            if config[key] is None:
                config[key] = []
            elif not isinstance(config[key], list):
                config[key] = [config[key]]

            config[key].extend(callbacks)
            if key in self.trainer_defaults:
                value = self.trainer_defaults[key]
                config[key] += value if isinstance(value, list) else [value]
            if self.save_config_callback and not config.get("fast_dev_run", False):
                config_callback = self.save_config_callback(
                    self._parser(self.subcommand),
                    self.config.get(str(self.subcommand), self.config),
                    **self.save_config_kwargs,
                )
                config[key].append(config_callback)
        else:
            rank_zero_warn(
                f"The `{self.trainer_class.__qualname__}` class does not expose the `{key}` argument so the callbacks"
                " will not be included."
            )
        return self.trainer_class(**config)

    def _parser(self, subcommand: Optional[str]) -> LightningArgumentParser:
        if subcommand is None:
            return self.parser
        # return the subcommand parser for the subcommand passed
        return self._subcommand_parsers[subcommand]
    @staticmethod
    def configure_optimizers(
        lightning_module: LightningModule, optimizer: Optimizer, lr_scheduler: Optional[LRSchedulerTypeUnion] = None
    ) -> Any:
        """Override to customize the :meth:`~pytorch_lightning.core.module.LightningModule.configure_optimizers`
        method.

        Args:
            lightning_module: A reference to the model.
            optimizer: The optimizer.
            lr_scheduler: The learning rate scheduler (if used).
        """
        if lr_scheduler is None:
            return optimizer
        if isinstance(lr_scheduler, ReduceLROnPlateau):
            return {
                "optimizer": optimizer,
                "lr_scheduler": {"scheduler": lr_scheduler, "monitor": lr_scheduler.monitor},
            }
        return [optimizer], [lr_scheduler]
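    # Illustrative sketch (not part of this module): a subclass can override this hook to change how the
    # automatically parsed optimizer and scheduler are combined. The dict returned here follows the usual
    # ``LightningModule.configure_optimizers`` conventions; the subclass name is a placeholder:
    #
    #     class MyLightningCLI(LightningCLI):
    #         @staticmethod
    #         def configure_optimizers(lightning_module, optimizer, lr_scheduler=None):
    #             if lr_scheduler is None:
    #                 return optimizer
    #             return {"optimizer": optimizer, "lr_scheduler": {"scheduler": lr_scheduler, "interval": "step"}}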
    def _add_configure_optimizers_method_to_model(self, subcommand: Optional[str]) -> None:
        """Overrides the model's :meth:`~pytorch_lightning.core.module.LightningModule.configure_optimizers` method
        if a single optimizer and, optionally, a scheduler argument group were added to the parser as 'AUTOMATIC'."""
        if not self.auto_configure_optimizers:
            return

        parser = self._parser(subcommand)

        def get_automatic(
            class_type: Union[Type, Tuple[Type, ...]], register: Dict[str, Tuple[Union[Type, Tuple[Type, ...]], str]]
        ) -> List[str]:
            automatic = []
            for key, (base_class, link_to) in register.items():
                if not isinstance(base_class, tuple):
                    base_class = (base_class,)
                if link_to == "AUTOMATIC" and any(issubclass(c, class_type) for c in base_class):
                    automatic.append(key)
            return automatic

        optimizers = get_automatic(Optimizer, parser._optimizers)
        lr_schedulers = get_automatic(LRSchedulerTypeTuple, parser._lr_schedulers)

        if len(optimizers) == 0:
            return

        if len(optimizers) > 1 or len(lr_schedulers) > 1:
            raise MisconfigurationException(
                f"`{self.__class__.__name__}.add_configure_optimizers_method_to_model` expects at most one optimizer "
                f"and one lr_scheduler to be 'AUTOMATIC', but found {optimizers+lr_schedulers}. In this case the user "
                "is expected to link the argument groups and implement `configure_optimizers`, see "
                "https://lightning.ai/docs/pytorch/LTS/common/lightning_cli.html"
                "#optimizers-and-learning-rate-schedulers"
            )

        optimizer_class = parser._optimizers[optimizers[0]][0]
        optimizer_init = self._get(self.config_init, optimizers[0])
        if not isinstance(optimizer_class, tuple):
            optimizer_init = _global_add_class_path(optimizer_class, optimizer_init)
        if not optimizer_init:
            # optimizers were registered automatically but not passed by the user
            return

        lr_scheduler_init = None
        if lr_schedulers:
            lr_scheduler_class = parser._lr_schedulers[lr_schedulers[0]][0]
            lr_scheduler_init = self._get(self.config_init, lr_schedulers[0])
            if not isinstance(lr_scheduler_class, tuple):
                lr_scheduler_init = _global_add_class_path(lr_scheduler_class, lr_scheduler_init)

        if is_overridden("configure_optimizers", self.model):
            _warn(
                f"`{self.model.__class__.__name__}.configure_optimizers` will be overridden by "
                f"`{self.__class__.__name__}.configure_optimizers`."
            )

        optimizer = instantiate_class(self.model.parameters(), optimizer_init)
        lr_scheduler = instantiate_class(optimizer, lr_scheduler_init) if lr_scheduler_init else None
        fn = partial(self.configure_optimizers, optimizer=optimizer, lr_scheduler=lr_scheduler)
        update_wrapper(fn, self.configure_optimizers)  # necessary for `is_overridden`
        # override the existing method
        self.model.configure_optimizers = MethodType(fn, self.model)

    def _get(self, config: Namespace, key: str, default: Optional[Any] = None) -> Any:
        """Utility to get a config value which might be inside a subcommand."""
        return config.get(str(self.subcommand), config).get(key, default)

    def _run_subcommand(self, subcommand: str) -> None:
        """Run the chosen subcommand."""
        before_fn = getattr(self, f"before_{subcommand}", None)
        if callable(before_fn):
            before_fn()

        default = getattr(self.trainer, subcommand)
        fn = getattr(self, subcommand, default)
        fn_kwargs = self._prepare_subcommand_kwargs(subcommand)
        fn(**fn_kwargs)

        after_fn = getattr(self, f"after_{subcommand}", None)
        if callable(after_fn):
            after_fn()

    def _prepare_subcommand_kwargs(self, subcommand: str) -> Dict[str, Any]:
        """Prepares the keyword arguments to pass to the subcommand to run."""
        fn_kwargs = {
            k: v
            for k, v in self.config_init[subcommand].items()
            if k in self._subcommand_method_arguments[subcommand]
        }
        fn_kwargs["model"] = self.model
        if self.datamodule is not None:
            fn_kwargs["datamodule"] = self.datamodule
        return fn_kwargs

    def _set_seed(self) -> None:
        """Sets the seed."""
        config_seed = self._get(self.config, "seed_everything")
        if config_seed is False:
            return
        if config_seed is True:
            # user requested seeding, choose randomly
            config_seed = seed_everything(workers=True)
        else:
            config_seed = seed_everything(config_seed, workers=True)
        self.config["seed_everything"] = config_seed
def _class_path_from_class(class_type: Type) -> str:
    return class_type.__module__ + "." + class_type.__name__


def _global_add_class_path(
    class_type: Type, init_args: Optional[Union[Namespace, Dict[str, Any]]] = None
) -> Dict[str, Any]:
    if isinstance(init_args, Namespace):
        init_args = init_args.as_dict()
    return {"class_path": _class_path_from_class(class_type), "init_args": init_args or {}}


def _add_class_path_generator(class_type: Type) -> Callable[[Namespace], Dict[str, Any]]:
    def add_class_path(init_args: Namespace) -> Dict[str, Any]:
        return _global_add_class_path(class_type, init_args)

    return add_class_path


def instantiate_class(args: Union[Any, Tuple[Any, ...]], init: Dict[str, Any]) -> Any:
    """Instantiates a class with the given args and init.

    Args:
        args: Positional arguments required for instantiation.
        init: Dict of the form {"class_path":...,"init_args":...}.

    Returns:
        The instantiated class object.
    """
    kwargs = init.get("init_args", {})
    if not isinstance(args, tuple):
        args = (args,)
    class_module, class_name = init["class_path"].rsplit(".", 1)
    module = __import__(class_module, fromlist=[class_name])
    args_class = getattr(module, class_name)
    return args_class(*args, **kwargs)


def _get_short_description(component: object) -> Optional[str]:
    if component.__doc__ is None:
        return None
    try:
        docstring = docstring_parser.parse(component.__doc__)
        return docstring.short_description
    except (ValueError, docstring_parser.ParseError) as ex:
        rank_zero_warn(f"Failed parsing docstring for {component}: {ex}")
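# Illustrative sketch (not part of this module): ``instantiate_class`` consumes the same
# ``{"class_path": ..., "init_args": ...}`` structure that appears in CLI config files, e.g.:
#
#     init = {"class_path": "torch.optim.SGD", "init_args": {"lr": 0.01}}
#     optimizer = instantiate_class(model.parameters(), init)  # ``model`` is a placeholder LightningModule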