Skip to content

Commit

Permalink
dummy logger (#1836)
Browse files Browse the repository at this point in the history
Co-authored-by: Nicki Skafte <nugginea@gmail.com>
  • Loading branch information
SkafteNicki and Nicki Skafte committed May 14, 2020
1 parent 1c10560 commit 88f816e
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 2 deletions.
35 changes: 35 additions & 0 deletions pytorch_lightning/loggers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,6 +292,41 @@ def version(self) -> str:
return '_'.join([str(logger.version) for logger in self._logger_iterable])


class DummyExperiment(object):
    """Dummy experiment that silently accepts any attribute access or call.

    Any unknown attribute resolves (via ``__getattr__``) to a no-op method,
    so code written against a real experiment object — e.g.
    ``logger.experiment.add_scalar(...)`` — runs without side effects.
    """

    def nop(self, *args, **kw):
        """Accept any positional/keyword arguments and do nothing.

        Note: the original defined this without an explicit ``self``; it
        worked only because the bound instance was swallowed by ``*args``.
        The explicit parameter keeps the same calling behavior.
        """
        return None

    def __getattr__(self, _):
        # Only invoked for attributes NOT found through normal lookup,
        # so every unknown name maps to the no-op method above.
        return self.nop


class DummyLogger(LightningLoggerBase):
    """No-op logger for internal use.

    Useful when a feature (e.g. an internal tuning run) needs to disable
    the user's logger while still letting any user code that touches
    ``self.logger`` run without errors.
    """

    def __init__(self):
        super().__init__()
        # Shared no-op experiment handed out by the ``experiment`` property.
        self._experiment = DummyExperiment()

    @property
    def experiment(self):
        """Return the dummy experiment object backing this logger."""
        return self._experiment

    def log_metrics(self, metrics, step):
        """Intentionally discard all metrics."""
        return None

    def log_hyperparams(self, params):
        """Intentionally discard all hyperparameters."""
        return None

    @property
    def name(self):
        """A dummy logger has no meaningful name."""
        return None

    @property
    def version(self):
        """A dummy logger has no meaningful version."""
        return None


def merge_dicts(
dicts: Sequence[Mapping],
agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
Expand Down
3 changes: 2 additions & 1 deletion pytorch_lightning/trainer/lr_finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.loggers.base import DummyLogger
from pytorch_lightning import _logger as log
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities import rank_zero_warn
Expand Down Expand Up @@ -133,7 +134,7 @@ def lr_find(self,
progress_bar_refresh_rate=1)]

# No logging
self.logger = None
self.logger = DummyLogger()

# Max step set to number of iterations
self.max_steps = num_training
Expand Down
3 changes: 2 additions & 1 deletion pytorch_lightning/trainer/training_tricks.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from pytorch_lightning import _logger as log
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.callbacks import GradientAccumulationScheduler
from pytorch_lightning.loggers.base import DummyLogger
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.memory import is_oom_error, garbage_collection_cuda

Expand Down Expand Up @@ -195,7 +196,7 @@ def __scale_batch_reset_params(self, model, steps_per_trial):
self.auto_scale_batch_size = None # prevent recursion
self.max_steps = steps_per_trial # take few steps
self.weights_summary = None # not needed before full run
self.logger = None # not needed before full run
self.logger = DummyLogger()
self.callbacks = [] # not needed before full run
self.checkpoint_callback = False # required for saving
self.early_stop_callback = None
Expand Down

0 comments on commit 88f816e

Please sign in to comment.