diff --git a/pytorch_lightning/loggers/base.py b/pytorch_lightning/loggers/base.py
index 857d661fdb5b3..823b9830272fa 100644
--- a/pytorch_lightning/loggers/base.py
+++ b/pytorch_lightning/loggers/base.py
@@ -292,6 +292,41 @@ def version(self) -> str:
         return '_'.join([str(logger.version) for logger in self._logger_iterable])
 
 
+class DummyExperiment(object):
+    """ Dummy experiment """
+    def nop(*args, **kw):
+        pass
+
+    def __getattr__(self, _):
+        return self.nop
+
+
+class DummyLogger(LightningLoggerBase):
+    """ Dummy logger for internal use. It is useful when we want to disable the user's
+    logger for a feature, but still ensure that the user's code can run """
+    def __init__(self):
+        super().__init__()
+        self._experiment = DummyExperiment()
+
+    @property
+    def experiment(self):
+        return self._experiment
+
+    def log_metrics(self, metrics, step):
+        pass
+
+    def log_hyperparams(self, params):
+        pass
+
+    @property
+    def name(self):
+        pass
+
+    @property
+    def version(self):
+        pass
+
+
 def merge_dicts(
     dicts: Sequence[Mapping],
     agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
diff --git a/pytorch_lightning/trainer/lr_finder.py b/pytorch_lightning/trainer/lr_finder.py
index 0ca41d2d54f99..7554a670bb988 100755
--- a/pytorch_lightning/trainer/lr_finder.py
+++ b/pytorch_lightning/trainer/lr_finder.py
@@ -13,6 +13,7 @@
 
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.callbacks import Callback
+from pytorch_lightning.loggers.base import DummyLogger
 from pytorch_lightning import _logger as log
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities import rank_zero_warn
@@ -133,7 +134,7 @@ def lr_find(self,
                                       progress_bar_refresh_rate=1)]
 
         # No logging
-        self.logger = None
+        self.logger = DummyLogger()
 
         # Max step set to number of iterations
         self.max_steps = num_training
diff --git a/pytorch_lightning/trainer/training_tricks.py b/pytorch_lightning/trainer/training_tricks.py
index b8f29a4c891e9..2a9adaf568f90 100644
--- a/pytorch_lightning/trainer/training_tricks.py
+++ b/pytorch_lightning/trainer/training_tricks.py
@@ -12,6 +12,7 @@
 
 from pytorch_lightning import _logger as log
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.callbacks import GradientAccumulationScheduler
+from pytorch_lightning.loggers.base import DummyLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.memory import is_oom_error, garbage_collection_cuda
 
@@ -195,7 +196,7 @@ def __scale_batch_reset_params(self, model, steps_per_trial):
         self.auto_scale_batch_size = None  # prevent recursion
         self.max_steps = steps_per_trial  # take few steps
         self.weights_summary = None  # not needed before full run
-        self.logger = None  # not needed before full run
+        self.logger = DummyLogger()
         self.callbacks = []  # not needed before full run
         self.checkpoint_callback = False  # required for saving
         self.early_stop_callback = None
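Note: the key mechanism in this patch is `DummyExperiment.__getattr__`, which resolves every attribute lookup to the no-op `nop` method. That is why swapping `self.logger = None` for `self.logger = DummyLogger()` keeps user code alive during `lr_find` and batch-size scaling: direct calls like `trainer.logger.experiment.add_scalar(...)` no longer raise `AttributeError` on `None`. Below is a minimal standalone sketch of that behavior; `_DummyExperiment` mirrors the class added in this diff, and the `add_scalar`/`log_artifact` calls are hypothetical user-side calls, not part of the patch.

    class _DummyExperiment:
        """Mirrors DummyExperiment from this diff: every attribute is a no-op."""
        def nop(*args, **kw):
            pass

        def __getattr__(self, _):
            # Any unknown attribute (add_scalar, log_artifact, ...) resolves
            # to the bound `nop` method, so calling it silently does nothing.
            return self.nop


    exp = _DummyExperiment()
    exp.add_scalar("lr", 1e-3, 0)   # hypothetical user call -> no-op
    exp.log_artifact("ckpt.pt")     # also a no-op, no AttributeError
    print(exp.anything_at_all)      # bound `nop` method, never raises

Note also that `nop` deliberately omits an explicit `self` parameter: when accessed via the instance it is a bound method, and the instance is simply absorbed by `*args` along with any other arguments.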