rename logging -> loggers (#767)
* move logging >> loggers

* add warning

* fix tests

* logging alias

* formatting

* formatting
Borda authored Feb 1, 2020
1 parent 784a053 commit 76a1c67
Showing 26 changed files with 184 additions and 195 deletions.
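The "* add warning" and "* logging alias" bullets above refer to keeping the old `pytorch_lightning.logging` import path working during the transition. The alias file itself is among the changed files not expanded below; a minimal sketch of what such a module could look like (hypothetical, mirroring the deprecation shims further down in this diff):

    # hypothetical pytorch_lightning/logging/__init__.py backward-compatibility alias
    import warnings

    warnings.warn("`pytorch_lightning.logging` has been renamed to `pytorch_lightning.loggers`"
                  " and the old path will be removed in a future release", DeprecationWarning)

    # re-export everything from the new location so old imports keep working
    from pytorch_lightning.loggers import *  # noqa: F401,F403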
8 changes: 4 additions & 4 deletions pl_examples/basic_examples/lightning_module_template.py
@@ -1,7 +1,7 @@
"""
Example template for defining a system
"""
import logging
import logging as log
import os
from argparse import ArgumentParser
from collections import OrderedDict
@@ -215,17 +215,17 @@ def __dataloader(self, train):

     @pl.data_loader
     def train_dataloader(self):
-        logging.info('training data loader called')
+        log.info('Training data loader called.')
         return self.__dataloader(train=True)
 
     @pl.data_loader
     def val_dataloader(self):
-        logging.info('val data loader called')
+        log.info('Validation data loader called.')
         return self.__dataloader(train=False)
 
     @pl.data_loader
     def test_dataloader(self):
-        logging.info('test data loader called')
+        log.info('Test data loader called.')
         return self.__dataloader(train=False)
 
     @staticmethod
@staticmethod
5 changes: 3 additions & 2 deletions pytorch_lightning/__init__.py
@@ -25,14 +25,15 @@
     # We are not importing the rest of the scikit during the build
     # process, as it may not be compiled yet
 else:
+    import logging as log
+    log.basicConfig(level=log.INFO)
+
     from .trainer.trainer import Trainer
     from .core.lightning import LightningModule
     from .core.decorators import data_loader
-    import logging
 
     __all__ = [
         'Trainer',
         'LightningModule',
         'data_loader',
     ]
-    logging.basicConfig(level=logging.INFO)
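Since the package now calls `log.basicConfig(level=log.INFO)` at import time, an application that wants quieter output has to raise the root-logger threshold after the import. A small sketch of that (an illustration, not part of this diff):

    import logging

    import pytorch_lightning as pl  # importing configures the root logger at INFO

    # raise the threshold back to WARNING if the INFO output is too chatty
    logging.getLogger().setLevel(logging.WARNING)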
15 changes: 8 additions & 7 deletions pytorch_lightning/callbacks/pt_callbacks.py
@@ -7,8 +7,9 @@
 
 import os
 import shutil
-import logging
+import logging as log
 import warnings
+
 import numpy as np
 
 from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
@@ -113,7 +114,7 @@ def __init__(self, monitor='val_loss',
 
         if mode not in ['auto', 'min', 'max']:
             if self.verbose > 0:
-                logging.info(f'EarlyStopping mode {mode} is unknown, fallback to auto mode.')
+                log.info(f'EarlyStopping mode {mode} is unknown, fallback to auto mode.')
             mode = 'auto'
 
         if mode == 'min':
@@ -175,7 +176,7 @@ def on_epoch_end(self, epoch, logs=None):
 
     def on_train_end(self, logs=None):
        if self.stopped_epoch > 0 and self.verbose > 0:
-            logging.info(f'Epoch {self.stopped_epoch + 1:05d}: early stopping')
+            log.info(f'Epoch {self.stopped_epoch + 1:05d}: early stopping')
 
 
 class ModelCheckpoint(Callback):
@@ -351,21 +352,21 @@ def on_epoch_end(self, epoch, logs=None):
                     else:
                         self.best = max(self.best_k_models.values())
                     if self.verbose > 0:
-                        logging.info(
+                        log.info(
                             f'\nEpoch {epoch:05d}: {self.monitor} reached'
                             f' {current:0.5f} (best {self.best:0.5f}), saving model to'
                             f' {filepath} as top {self.save_top_k}')
                     self._save_model(filepath)
 
                 else:
                     if self.verbose > 0:
-                        logging.info(
+                        log.info(
                             f'\nEpoch {epoch:05d}: {self.monitor}'
                             f' was not in top {self.save_top_k}')
 
         else:
             if self.verbose > 0:
-                logging.info(f'\nEpoch {epoch:05d}: saving model to {filepath}')
+                log.info(f'\nEpoch {epoch:05d}: saving model to {filepath}')
             self._save_model(filepath)
 
 
@@ -417,6 +418,6 @@ def on_epoch_begin(self, epoch, trainer):
 # losses = [10, 9, 8, 8, 6, 4.3, 5, 4.4, 2.8, 2.5]
 # for i, loss in enumerate(losses):
 #     should_stop = c.on_epoch_end(i, logs={'val_loss': loss})
-#     logging.info(loss)
+#     log.info(loss)
 #     if should_stop:
 #         break
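The commented-out block above sketches a manual check of EarlyStopping. A runnable version of the same idea might look like the following; it assumes the `monitor`, `patience`, and `verbose` constructor arguments visible in this file and treats the return value of `on_epoch_end` as a stop flag, as the comment does. An illustration only, not part of the commit:

    import logging as log

    from pytorch_lightning.callbacks.pt_callbacks import EarlyStopping

    log.basicConfig(level=log.INFO)

    c = EarlyStopping(monitor='val_loss', patience=2, verbose=1)
    losses = [10, 9, 8, 8, 6, 4.3, 5, 4.4, 2.8, 2.5]
    for i, loss in enumerate(losses):
        # the callback is assumed to return True once `val_loss` stops improving
        should_stop = c.on_epoch_end(i, logs={'val_loss': loss})
        log.info(loss)
        if should_stop:
            break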
8 changes: 4 additions & 4 deletions pytorch_lightning/core/lightning.py
@@ -1,10 +1,10 @@
 import collections
-import logging
+import logging as log
+import csv
 import os
 import warnings
 from abc import ABC, abstractmethod
 from argparse import Namespace
-import csv
 
 import torch
 import torch.distributed as dist
@@ -1130,7 +1130,7 @@ def __init__(self, hparams):
 
     def summarize(self, mode):
         model_summary = ModelSummary(self, mode=mode)
-        logging.info('\n' + model_summary.__str__())
+        log.info('\n' + model_summary.__str__())
 
     def freeze(self):
         r"""
@@ -1213,7 +1213,7 @@ def on_save_checkpoint(self, checkpoint):
 
 def load_hparams_from_tags_csv(tags_csv):
     if not os.path.isfile(tags_csv):
-        logging.warning(f'Missing Tags: {tags_csv}.')
+        log.warning(f'Missing Tags: {tags_csv}.')
         return Namespace()
 
     tags = {}
4 changes: 2 additions & 2 deletions pytorch_lightning/core/memory.py
@@ -3,7 +3,7 @@
 '''
 
 import gc
-import logging
+import logging as log
 import os
 import subprocess
 from subprocess import PIPE
@@ -214,7 +214,7 @@ def print_mem_stack(): # pragma: no cover
     for obj in gc.get_objects():
         try:
             if torch.is_tensor(obj) or (hasattr(obj, 'data') and torch.is_tensor(obj.data)):
-                logging.info(type(obj), obj.size())
+                log.info(type(obj), obj.size())
         except Exception:
             pass
 
113 changes: 113 additions & 0 deletions pytorch_lightning/loggers/__init__.py
@@ -0,0 +1,113 @@
"""
Lightning supports most popular logging frameworks (Tensorboard, comet, weights and biases, etc...).
To use a logger, simply pass it into the trainer.
.. code-block:: python
from pytorch_lightning import loggers
# lightning uses tensorboard by default
tb_logger = loggers.TensorBoardLogger()
trainer = Trainer(logger=tb_logger)
# or choose from any of the others such as MLFlow, Comet, Neptune, Wandb
comet_logger = loggers.CometLogger()
trainer = Trainer(logger=comet_logger)
.. note:: All loggers log by default to `os.getcwd()`. To change the path without creating a logger set
Trainer(default_save_path='/your/path/to/save/checkpoints')
Custom logger
-------------
You can implement your own logger by writing a class that inherits from
`LightningLoggerBase`. Use the `rank_zero_only` decorator to make sure that
only the first process in DDP training logs data.
.. code-block:: python
from pytorch_lightning.loggers import LightningLoggerBase, rank_zero_only
class MyLogger(LightningLoggerBase):
@rank_zero_only
def log_hyperparams(self, params):
# params is an argparse.Namespace
# your code to record hyperparameters goes here
pass
@rank_zero_only
def log_metrics(self, metrics, step):
# metrics is a dictionary of metric names and values
# your code to record metrics goes here
pass
def save(self):
# Optional. Any code necessary to save logger data goes here
pass
@rank_zero_only
def finalize(self, status):
# Optional. Any code that needs to be run after training
# finishes goes here
If you write a logger than may be useful to others, please send
a pull request to add it to Lighting!
Using loggers
-------------
Call the logger anywhere from your LightningModule by doing:
.. code-block:: python
def train_step(...):
# example
self.logger.experiment.whatever_method_summary_writer_supports(...)
def any_lightning_module_function_or_hook(...):
self.logger.experiment.add_histogram(...)
Supported Loggers
-----------------
"""
from os import environ

from .base import LightningLoggerBase, rank_zero_only
from .tensorboard import TensorBoardLogger

__all__ = ['TensorBoardLogger']

try:
    # needed to prevent ImportError and duplicated logs.
    environ["COMET_DISABLE_AUTO_LOGGING"] = "1"

    from .comet import CometLogger
    __all__.append('CometLogger')
except ImportError:
    del environ["COMET_DISABLE_AUTO_LOGGING"]

try:
    from .mlflow import MLFlowLogger
    __all__.append('MLFlowLogger')
except ImportError:
    pass

try:
    from .neptune import NeptuneLogger
    __all__.append('NeptuneLogger')
except ImportError:
    pass

try:
    from .test_tube import TestTubeLogger
    __all__.append('TestTubeLogger')
except ImportError:
    pass

try:
    from .wandb import WandbLogger
    __all__.append('WandbLogger')
except ImportError:
    pass
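Because each optional backend is appended to `__all__` only when its import succeeds, callers can feature-detect a logger before instantiating it. A short usage sketch (an illustration, mirroring the constructor calls in the module docstring above):

    from pytorch_lightning import loggers

    # fall back to the always-available TensorBoard logger when wandb is absent
    if 'WandbLogger' in loggers.__all__:
        logger = loggers.WandbLogger()
    else:
        logger = loggers.TensorBoardLogger()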
File renamed without changes.
pytorch_lightning/loggers/comet.py (renamed from pytorch_lightning/logging/comet.py)
@@ -31,7 +31,7 @@ def __init__(self, api_key=None, save_dir=None, workspace=None,
         .. code-block:: python
 
             # ONLINE MODE
-            from pytorch_lightning.logging import CometLogger
+            from pytorch_lightning.loggers import CometLogger
 
             # arguments made to CometLogger are passed on to the comet_ml.Experiment class
             comet_logger = CometLogger(
Expand All @@ -47,7 +47,7 @@ def __init__(self, api_key=None, save_dir=None, workspace=None,
.. code-block:: python
# OFFLINE MODE
from pytorch_lightning.logging import CometLogger
from pytorch_lightning.loggers import CometLogger
# arguments made to CometLogger are passed on to the comet_ml.Experiment class
comet_logger = CometLogger(
pytorch_lightning/loggers/comet_logger.py (renamed from pytorch_lightning/logging/comet_logger.py)
@@ -7,4 +7,4 @@
 warnings.warn("`comet_logger` module has been renamed to `comet` since v0.6.0"
               " and will be removed in v0.8.0", DeprecationWarning)
 
-from pytorch_lightning.logging.comet import CometLogger  # noqa: E402
+from pytorch_lightning.loggers.comet import CometLogger  # noqa: E402
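The shim warns once at import time. During a migration window, a consumer that cannot switch imports yet could silence just this warning; a sketch (my example, not part of the commit):

    import warnings

    with warnings.catch_warnings():
        # hide only the rename warning emitted by the old-style import path
        warnings.simplefilter("ignore", DeprecationWarning)
        from pytorch_lightning.loggers.comet_logger import CometLogger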
pytorch_lightning/loggers/mlflow.py (renamed from pytorch_lightning/logging/mlflow.py)
@@ -3,7 +3,7 @@
 .. code-block:: python
 
-    from pytorch_lightning.logging import MLFlowLogger
+    from pytorch_lightning.loggers import MLFlowLogger
 
     mlf_logger = MLFlowLogger(
         experiment_name="default",
         tracking_uri="file:/."
pytorch_lightning/loggers/mlflow_logger.py (renamed from pytorch_lightning/logging/mlflow_logger.py)
@@ -7,4 +7,4 @@
 warnings.warn("`mlflow_logger` module has been renamed to `mlflow` since v0.6.0"
               " and will be removed in v0.8.0", DeprecationWarning)
 
-from pytorch_lightning.logging.mlflow import MLFlowLogger  # noqa: E402
+from pytorch_lightning.loggers.mlflow import MLFlowLogger  # noqa: E402
pytorch_lightning/loggers/neptune.py (renamed from pytorch_lightning/logging/neptune.py)
@@ -6,7 +6,7 @@
 
 .. code-block:: python
 
-    from pytorch_lightning.logging import NeptuneLogger
+    from pytorch_lightning.loggers import NeptuneLogger
 
     # arguments made to NeptuneLogger are passed on to the neptune.experiments.Experiment class
     neptune_logger = NeptuneLogger(
@@ -48,7 +48,7 @@ def any_lightning_module_function_or_hook(...):
 from torch import is_tensor
 
 # from .base import LightningLoggerBase, rank_zero_only
-from pytorch_lightning.logging.base import LightningLoggerBase, rank_zero_only
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
 
 logger = getLogger(__name__)
 
Expand All @@ -66,7 +66,7 @@ def __init__(self, api_key=None, project_name=None, offline_mode=False,
.. code-block:: python
# ONLINE MODE
from pytorch_lightning.logging import NeptuneLogger
from pytorch_lightning.loggers import NeptuneLogger
# arguments made to NeptuneLogger are passed on to the neptune.experiments.Experiment class
neptune_logger = NeptuneLogger(
Expand All @@ -81,7 +81,7 @@ def __init__(self, api_key=None, project_name=None, offline_mode=False,
.. code-block:: python
# OFFLINE MODE
from pytorch_lightning.logging import NeptuneLogger
from pytorch_lightning.loggers import NeptuneLogger
# arguments made to NeptuneLogger are passed on to the neptune.experiments.Experiment class
neptune_logger = NeptuneLogger(
File renamed without changes.
pytorch_lightning/loggers/test_tube.py (renamed from pytorch_lightning/logging/test_tube.py)
@@ -6,7 +6,7 @@
 .. code-block:: python
 
-    from pytorch_lightning.logging import TestTubeLogger
+    from pytorch_lightning.loggers import TestTubeLogger
 
     tt_logger = TestTubeLogger(
         save_dir=".",
         name="default",
pytorch_lightning/loggers/test_tube_logger.py (renamed from pytorch_lightning/logging/test_tube_logger.py)
@@ -7,4 +7,4 @@
 warnings.warn("`test_tube_logger` module has been renamed to `test_tube` since v0.6.0"
               " and will be removed in v0.8.0", DeprecationWarning)
 
-from pytorch_lightning.logging.test_tube import TestTubeLogger  # noqa: E402
+from pytorch_lightning.loggers.test_tube import TestTubeLogger  # noqa: E402
pytorch_lightning/loggers/wandb.py (renamed from pytorch_lightning/logging/wandb.py)
@@ -25,7 +25,7 @@ class WandbLogger(LightningLoggerBase):
     --------
     .. code-block:: python
 
-        from pytorch_lightning.logging import WandbLogger
+        from pytorch_lightning.loggers import WandbLogger
         from pytorch_lightning import Trainer
 
         wandb_logger = WandbLogger()
