From 3b13bf44db19285dfed288ee03efa0af691e3dc2 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Wed, 10 Feb 2021 13:26:53 -0600 Subject: [PATCH 01/19] feat(wandb): use new wandb API --- pytorch_lightning/loggers/wandb.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 63708ff1e5852..8829389f2f33c 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -56,7 +56,6 @@ class WandbLogger(LightningLoggerBase): project: The name of the project to which this run will belong. log_model: Save checkpoints in wandb dir to upload on W&B servers. prefix: A string to put at the beginning of metric keys. - sync_step: Sync Trainer step with wandb step. experiment: WandB experiment object. Automatically set when creating a run. \**kwargs: Additional arguments like `entity`, `group`, `tags`, etc. used by :func:`wandb.init` can be passed as keyword arguments in this logger. @@ -94,7 +93,6 @@ def __init__( log_model: Optional[bool] = False, experiment=None, prefix: Optional[str] = '', - sync_step: Optional[bool] = True, **kwargs ): if wandb is None: @@ -119,7 +117,6 @@ def __init__( self._project = project self._log_model = log_model self._prefix = prefix - self._sync_step = sync_step self._experiment = experiment self._kwargs = kwargs # logging multiple Trainer on a single W&B run (k-fold, resuming, etc) @@ -167,6 +164,9 @@ def experiment(self) -> Run: # save checkpoints in wandb dir to upload on W&B servers if self._save_dir is None: self._save_dir = self._experiment.dir + + # define default x-axis + self._experiment.define_metric("*", x_axis='train/step', auto=True) return self._experiment def watch(self, model: nn.Module, log: str = 'gradients', log_freq: int = 100): @@ -184,15 +184,8 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> assert rank_zero_only.rank == 0, 'experiment tried to log from global_rank != 0' metrics = self._add_prefix(metrics) - if self._sync_step and step is not None and step + self._step_offset < self.experiment.step: - self.warning_cache.warn( - 'Trying to log at a previous step. Use `WandbLogger(sync_step=False)`' - ' or try logging with `commit=False` when calling manually `wandb.log`.' 
- ) - if self._sync_step: - self.experiment.log(metrics, step=(step + self._step_offset) if step is not None else None) - elif step is not None: - self.experiment.log({**metrics, 'trainer_step': (step + self._step_offset)}) + if step is not None: + self.experiment.log({**metrics, 'train/step': (step + self._step_offset)}) else: self.experiment.log(metrics) From 866eebe5372fac279cf736d015c574d6a9d2bb0b Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Wed, 10 Feb 2021 13:35:58 -0600 Subject: [PATCH 02/19] feat: handle earlier wandb versions --- pytorch_lightning/loggers/wandb.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 8829389f2f33c..20dcd9182a5b0 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -165,8 +165,9 @@ def experiment(self) -> Run: if self._save_dir is None: self._save_dir = self._experiment.dir - # define default x-axis - self._experiment.define_metric("*", x_axis='train/step', auto=True) + # define default x-axis (for latest wandb versions) + if getattr(self._experiment, "define_metric", None): + self._experiment.define_metric("*", x_axis='train/step', auto=True) return self._experiment def watch(self, model: nn.Module, log: str = 'gradients', log_freq: int = 100): From 46a46801f393c5ef23453943ecfc5ec3d4a03163 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Wed, 10 Feb 2021 13:47:48 -0600 Subject: [PATCH 03/19] feat: remove unused import --- pytorch_lightning/loggers/wandb.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 20dcd9182a5b0..12dd62672eff3 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -24,7 +24,6 @@ from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import _module_available, rank_zero_only from pytorch_lightning.utilities.exceptions import MisconfigurationException -from pytorch_lightning.utilities.warning_utils import WarningCache _WANDB_AVAILABLE = _module_available("wandb") @@ -121,7 +120,6 @@ def __init__( self._kwargs = kwargs # logging multiple Trainer on a single W&B run (k-fold, resuming, etc) self._step_offset = 0 - self.warning_cache = WarningCache() def __getstate__(self): state = self.__dict__.copy() @@ -168,6 +166,7 @@ def experiment(self) -> Run: # define default x-axis (for latest wandb versions) if getattr(self._experiment, "define_metric", None): self._experiment.define_metric("*", x_axis='train/step', auto=True) + return self._experiment def watch(self, model: nn.Module, log: str = 'gradients', log_freq: int = 100): From 9f3ef6109a6ae865dadf4dc017cf274b74c1a9b3 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Wed, 10 Feb 2021 15:49:59 -0600 Subject: [PATCH 04/19] feat(wandb): regular x-axis for train/step --- pytorch_lightning/loggers/wandb.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 12dd62672eff3..1ff64d15d1717 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -165,6 +165,7 @@ def experiment(self) -> Run: # define default x-axis (for latest wandb versions) if getattr(self._experiment, "define_metric", None): + self._experiment.define_metric('train/step') self._experiment.define_metric("*", x_axis='train/step', auto=True) return self._experiment From 
0b037fcc38b7524efb0604161de1e2b8f13a9674 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Wed, 10 Feb 2021 16:09:52 -0600 Subject: [PATCH 05/19] feat(wandb): offset not needed anymore --- pytorch_lightning/loggers/wandb.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 1ff64d15d1717..e813cf278d456 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -118,8 +118,6 @@ def __init__( self._prefix = prefix self._experiment = experiment self._kwargs = kwargs - # logging multiple Trainer on a single W&B run (k-fold, resuming, etc) - self._step_offset = 0 def __getstate__(self): state = self.__dict__.copy() @@ -156,9 +154,6 @@ def experiment(self) -> Run: **self._kwargs ) if wandb.run is None else wandb.run - # offset logging step when resuming a run - self._step_offset = self._experiment.step - # save checkpoints in wandb dir to upload on W&B servers if self._save_dir is None: self._save_dir = self._experiment.dir @@ -186,7 +181,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> metrics = self._add_prefix(metrics) if step is not None: - self.experiment.log({**metrics, 'train/step': (step + self._step_offset)}) + self.experiment.log({**metrics, 'train/step': step}) else: self.experiment.log(metrics) @@ -206,10 +201,6 @@ def version(self) -> Optional[str]: @rank_zero_only def finalize(self, status: str) -> None: - # offset future training logged on same W&B run - if self._experiment is not None: - self._step_offset = self._experiment.step - # upload all checkpoints from saving dir if self._log_model: wandb.save(os.path.join(self.save_dir, "*.ckpt")) From e46ba5fdbcc6f656e47470487d59623e17599cfa Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Thu, 11 Feb 2021 19:24:48 -0600 Subject: [PATCH 06/19] tests(wandb): handle new API --- tests/loggers/test_all.py | 2 +- tests/loggers/test_wandb.py | 33 +++------------------------------ 2 files changed, 4 insertions(+), 31 deletions(-) diff --git a/tests/loggers/test_all.py b/tests/loggers/test_all.py index 02721ba436743..20d7f7b8ed3e9 100644 --- a/tests/loggers/test_all.py +++ b/tests/loggers/test_all.py @@ -404,4 +404,4 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): wandb.run = None wandb.init().step = 0 logger.log_metrics({"test": 1.0}, step=0) - logger.experiment.log.assert_called_once_with({'tmp-test': 1.0}, step=0) + logger.experiment.log.assert_called_once_with({'tmp-test': 1.0, 'train/step': 0}) diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py index e5b9b891b88c1..968fddb62877e 100644 --- a/tests/loggers/test_wandb.py +++ b/tests/loggers/test_wandb.py @@ -41,22 +41,7 @@ def test_wandb_logger_init(wandb, recwarn): logger = WandbLogger() logger.log_metrics({'acc': 1.0}) wandb.init.assert_called_once() - wandb.init().log.assert_called_once_with({'acc': 1.0}, step=None) - - # test sync_step functionality - wandb.init().log.reset_mock() - wandb.init.reset_mock() - wandb.run = None - wandb.init().step = 0 - logger = WandbLogger(sync_step=False) - logger.log_metrics({'acc': 1.0}) wandb.init().log.assert_called_once_with({'acc': 1.0}) - wandb.init().log.reset_mock() - logger.log_metrics({'acc': 1.0}, step=3) - wandb.init().log.assert_called_once_with({'acc': 1.0, 'trainer_step': 3}) - - # mock wandb step - wandb.init().step = 0 # test wandb.init not called if there is a W&B run wandb.init().log.reset_mock() @@ -65,13 +50,12 @@ def 
test_wandb_logger_init(wandb, recwarn): logger = WandbLogger() logger.log_metrics({'acc': 1.0}, step=3) wandb.init.assert_called_once() - wandb.init().log.assert_called_once_with({'acc': 1.0}, step=3) + wandb.init().log.assert_called_once_with({'acc': 1.0, 'train/step': 3}) # continue training on same W&B run and offset step - wandb.init().step = 3 logger.finalize('success') - logger.log_metrics({'acc': 1.0}, step=3) - wandb.init().log.assert_called_with({'acc': 1.0}, step=6) + logger.log_metrics({'acc': 1.0}, step=6) + wandb.init().log.assert_called_with({'acc': 1.0, 'train/step': 6}) # log hyper parameters logger.log_hyperparams({'test': None, 'nested': {'a': 1}, 'b': [2, 3, 4]}) @@ -88,17 +72,6 @@ def test_wandb_logger_init(wandb, recwarn): logger.watch('model', 'log', 10) wandb.init().watch.assert_called_once_with('model', log='log', log_freq=10) - # verify warning for logging at a previous step - assert 'Trying to log at a previous step' not in get_warnings(recwarn) - # current step from wandb should be 6 (last logged step) - logger.experiment.step = 6 - # logging at step 2 should raise a warning (step_offset is still 3) - logger.log_metrics({'acc': 1.0}, step=2) - assert 'Trying to log at a previous step' in get_warnings(recwarn) - # logging again at step 2 should not display again the same warning - logger.log_metrics({'acc': 1.0}, step=2) - assert 'Trying to log at a previous step' not in get_warnings(recwarn) - assert logger.name == wandb.init().project_name() assert logger.version == wandb.init().id From a0940cf14c2e298f57e89d8357c415c7ea2a51c5 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Thu, 11 Feb 2021 20:29:06 -0600 Subject: [PATCH 07/19] style: remove white space --- pytorch_lightning/loggers/wandb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index e813cf278d456..ebdd65d71105a 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -157,7 +157,7 @@ def experiment(self) -> Run: # save checkpoints in wandb dir to upload on W&B servers if self._save_dir is None: self._save_dir = self._experiment.dir - + # define default x-axis (for latest wandb versions) if getattr(self._experiment, "define_metric", None): self._experiment.define_metric('train/step') From 6b80d73275ee6df7709f1b1edec26977e0f64315 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Thu, 11 Feb 2021 20:35:36 -0600 Subject: [PATCH 08/19] doc(wandb): update CHANGELOG --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 38d97c984c6da..2f9b95098eac4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -178,6 +178,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Fixed support custom DataLoader with DDP if they can be re-instantiated ([#5745](https://github.com/PyTorchLightning/pytorch-lightning/pull/5745)) +- Prevent `WandbLogger` from dropping values ([#5931](https://github.com/PyTorchLightning/pytorch-lightning/pull/5931)) + ## [1.1.8] - 2021-02-08 ### Fixed From 8a2818c7f0f1c1f5cd39c2c6fd5f16eadb800aeb Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Tue, 16 Feb 2021 18:44:08 -0600 Subject: [PATCH 09/19] feat(wandb): update per API --- pytorch_lightning/loggers/wandb.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index ebdd65d71105a..a49701d72cb39 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -160,8 +160,7 @@ def experiment(self) -> Run: # define default x-axis (for latest wandb versions) if getattr(self._experiment, "define_metric", None): - self._experiment.define_metric('train/step') - self._experiment.define_metric("*", x_axis='train/step', auto=True) + self._experiment.define_metric("*", step_metric='train/step', step_sync=True) return self._experiment From f00dadf0aac923a038f02e26e5cb79d60dc2e8c6 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Fri, 19 Feb 2021 10:27:09 -0600 Subject: [PATCH 10/19] feat(wandb): deprecation of sync_step --- pytorch_lightning/loggers/wandb.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 849a79560543f..4871576207dda 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -24,6 +24,9 @@ from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import _module_available, rank_zero_only from pytorch_lightning.utilities.exceptions import MisconfigurationException +from pytorch_lightning.utilities.warnings import WarningCache + +warning_cache = WarningCache() _WANDB_AVAILABLE = _module_available("wandb") @@ -90,6 +93,7 @@ def __init__( log_model: Optional[bool] = False, experiment=None, prefix: Optional[str] = '', + sync_step: Optional[bool] = None **kwargs ): if wandb is None: @@ -104,6 +108,13 @@ def __init__( ' since model checkpoints cannot be uploaded in offline mode.\n' 'Hint: Set `offline=False` to log your model.' ) + + if sync_step is not None: + # TODO: remove sync_step option in v1.3 + warning_cache.warn( + "`WandbLogger(sync_step=True)` is deprecated in v1.2 and will be removed in v1.3." + " Metrics are now logged separatelty and automatically synchronized." 
, DeprecationWarning ) super().__init__() From 6230155834b397bc23db7089c2f66a37f231b4ee Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Fri, 19 Feb 2021 10:28:52 -0600 Subject: [PATCH 11/19] fix(wandb): typo --- pytorch_lightning/loggers/wandb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 4871576207dda..7eb570904d587 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -93,7 +93,7 @@ def __init__( log_model: Optional[bool] = False, experiment=None, prefix: Optional[str] = '', - sync_step: Optional[bool] = None + sync_step: Optional[bool] = None, **kwargs ): if wandb is None: From 59cf073e9995c2f3e6d0f7fc7f42056bd4631fce Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Fri, 19 Feb 2021 10:59:37 -0600 Subject: [PATCH 12/19] style: fix pep8 --- pytorch_lightning/loggers/wandb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 7eb570904d587..6fbbf4f1253ca 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -108,12 +108,12 @@ def __init__( ' since model checkpoints cannot be uploaded in offline mode.\n' 'Hint: Set `offline=False` to log your model.' ) - + if sync_step is not None: # TODO: remove sync_step option in v1.3 warning_cache.warn( "`WandbLogger(sync_step=True)` is deprecated in v1.2 and will be removed in v1.3." - " Metrics are now logged separatelty and automatically synchronized." , DeprecationWarning + " Metrics are now logged separatelty and automatically synchronized.", DeprecationWarning ) super().__init__() From d95dd58a86401e945955b78742b0a702d18d9ee3 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Fri, 19 Feb 2021 11:17:00 -0600 Subject: [PATCH 13/19] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Carlos Mocholí --- pytorch_lightning/loggers/wandb.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 6fbbf4f1253ca..f6d5e7503ab06 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -110,10 +110,9 @@ def __init__( ) if sync_step is not None: - # TODO: remove sync_step option in v1.3 warning_cache.warn( - "`WandbLogger(sync_step=True)` is deprecated in v1.2 and will be removed in v1.3." - " Metrics are now logged separatelty and automatically synchronized.", DeprecationWarning + "`WandbLogger(sync_step=True)` is deprecated in v1.2.1 and will be removed in v1.5." + " Metrics are now logged separately and automatically synchronized.", DeprecationWarning ) super().__init__() From b0f48e57d4e47f75b061e16252b5281b98f9c9e7 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Fri, 19 Feb 2021 11:34:03 -0600 Subject: [PATCH 14/19] docs: update CHANGELOG --- CHANGELOG.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 62accc545bcaf..2c5a4eb09a4e7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
### Fixed - Fixed incorrect yield logic for the amp autocast context manager ([#6080](https://github.com/PyTorchLightning/pytorch-lightning/pull/6080)) +- Prevent `WandbLogger` from dropping values ([#5931](https://github.com/PyTorchLightning/pytorch-lightning/pull/5931)) ## [1.2.0] - 2021-02-18 @@ -172,8 +173,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed wrapping optimizers upon assignment ([#6006](https://github.com/PyTorchLightning/pytorch-lightning/pull/6006)) - Fixed allowing hashing of metrics with lists in their state ([#5939](https://github.com/PyTorchLightning/pytorch-lightning/pull/5939)) -- Prevent `WandbLogger` from dropping values ([#5931](https://github.com/PyTorchLightning/pytorch-lightning/pull/5931)) - ## [1.1.8] - 2021-02-08 From b19f5d217bdb35db2fc8f06b90947c1afcadb9b3 Mon Sep 17 00:00:00 2001 From: Carlos Mocholi Date: Sun, 21 Feb 2021 02:58:44 +0100 Subject: [PATCH 15/19] Add deprecation test --- CHANGELOG.md | 2 ++ pytorch_lightning/loggers/wandb.py | 2 +- tests/deprecated_api/test_remove_1-5.py | 25 +++++++++++++++++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 tests/deprecated_api/test_remove_1-5.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c5a4eb09a4e7..a201fa1d73509 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). ### Fixed - Fixed incorrect yield logic for the amp autocast context manager ([#6080](https://github.com/PyTorchLightning/pytorch-lightning/pull/6080)) + + - Prevent `WandbLogger` from dropping values ([#5931](https://github.com/PyTorchLightning/pytorch-lightning/pull/5931)) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index f6d5e7503ab06..f6ecd25f0232a 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -111,7 +111,7 @@ def __init__( if sync_step is not None: warning_cache.warn( - "`WandbLogger(sync_step=True)` is deprecated in v1.2.1 and will be removed in v1.5." + "`WandbLogger(sync_step=(True|False))` is deprecated in v1.2.1 and will be removed in v1.5." " Metrics are now logged separately and automatically synchronized.", DeprecationWarning ) diff --git a/tests/deprecated_api/test_remove_1-5.py b/tests/deprecated_api/test_remove_1-5.py new file mode 100644 index 0000000000000..c5221f61c0545 --- /dev/null +++ b/tests/deprecated_api/test_remove_1-5.py @@ -0,0 +1,25 @@ +# Copyright The PyTorch Lightning team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test deprecated functionality which will be removed in v1.5.0""" +from unittest import mock + +import pytest + +from pytorch_lightning.loggers import WandbLogger + + +@mock.patch('pytorch_lightning.loggers.wandb.wandb') +def test_v1_5_0_wandb_unused_sync_step(tmpdir): + with pytest.deprecated_call(match=r"v1.2.1 and will be removed in v1.5"): + WandbLogger(sync_step=True) From 66391003fc92941ea5dbb4053781acbee663ec11 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Tue, 23 Feb 2021 18:20:57 -0600 Subject: [PATCH 16/19] Apply suggestions from code review Co-authored-by: thomas chaton --- pytorch_lightning/loggers/wandb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index f6ecd25f0232a..32462df337c71 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -168,7 +168,7 @@ def experiment(self) -> Run: # define default x-axis (for latest wandb versions) if getattr(self._experiment, "define_metric", None): - self._experiment.define_metric("*", step_metric='train/step', step_sync=True) + self._experiment.define_metric("*", step_metric=trainer/global_step', step_sync=True) return self._experiment @@ -188,7 +188,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> metrics = self._add_prefix(metrics) if step is not None: - self.experiment.log({**metrics, 'train/step': step}) + self.experiment.log({**metrics, 'trainer/global_step': step}) else: self.experiment.log(metrics) From 6d587a0540aef9d6c81aa6a66c031ac69b45ca09 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Tue, 23 Feb 2021 18:24:33 -0600 Subject: [PATCH 17/19] fix(wandb): tests and typo --- pytorch_lightning/loggers/wandb.py | 3 ++- tests/loggers/test_all.py | 2 +- tests/loggers/test_wandb.py | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index 32462df337c71..6a2502b176f0f 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -168,7 +168,8 @@ def experiment(self) -> Run: # define default x-axis (for latest wandb versions) if getattr(self._experiment, "define_metric", None): - self._experiment.define_metric("*", step_metric=trainer/global_step', step_sync=True) + self._experiment.define_metric("trainer/global_step") + self._experiment.define_metric("*", step_metric='trainer/global_step', step_sync=True) return self._experiment diff --git a/tests/loggers/test_all.py b/tests/loggers/test_all.py index 20d7f7b8ed3e9..c80dddde2774c 100644 --- a/tests/loggers/test_all.py +++ b/tests/loggers/test_all.py @@ -404,4 +404,4 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): wandb.run = None wandb.init().step = 0 logger.log_metrics({"test": 1.0}, step=0) - logger.experiment.log.assert_called_once_with({'tmp-test': 1.0, 'train/step': 0}) + logger.experiment.log.assert_called_once_with({'tmp-test': 1.0, 'trainer/global_step': 0}) diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py index 968fddb62877e..0eefb9625ddc7 100644 --- a/tests/loggers/test_wandb.py +++ b/tests/loggers/test_wandb.py @@ -50,12 +50,12 @@ def test_wandb_logger_init(wandb, recwarn): logger = WandbLogger() logger.log_metrics({'acc': 1.0}, step=3) wandb.init.assert_called_once() - wandb.init().log.assert_called_once_with({'acc': 1.0, 'train/step': 3}) + wandb.init().log.assert_called_once_with({'acc': 1.0, 'trainer/global_step': 3}) # continue training on same W&B 
run and offset step logger.finalize('success') logger.log_metrics({'acc': 1.0}, step=6) - wandb.init().log.assert_called_with({'acc': 1.0, 'train/step': 6}) + wandb.init().log.assert_called_with({'acc': 1.0, 'trainer/global_step': 6}) # log hyper parameters logger.log_hyperparams({'test': None, 'nested': {'a': 1}, 'b': [2, 3, 4]}) From 3ced69cfb147d7d03ada4cce1d87052883fc30a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Sat, 27 Feb 2021 02:26:09 +0100 Subject: [PATCH 18/19] fix changelog --- CHANGELOG.md | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d4ec6ed22cc39..f6f58f63d4851 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,12 +45,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). ### Fixed -- Fixed incorrect yield logic for the amp autocast context manager ([#6080](https://github.com/PyTorchLightning/pytorch-lightning/pull/6080)) - - -- Prevent `WandbLogger` from dropping values ([#5931](https://github.com/PyTorchLightning/pytorch-lightning/pull/5931)) - - - Made the `Plugin.reduce` method more consistent across all Plugins to reflect a mean-reduction by default ([#6011](https://github.com/PyTorchLightning/pytorch-lightning/pull/6011) @@ -69,6 +63,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed multiple early stopping callbacks ([#6197](https://github.com/PyTorchLightning/pytorch-lightning/pull/6197)) +- Prevent `WandbLogger` from dropping values ([#5931](https://github.com/PyTorchLightning/pytorch-lightning/pull/5931)) + + ## [1.2.1] - 2021-02-23 ### Fixed From 8ffdf0bcd6a3696a6b2aa93919134014a46fd397 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Sat, 27 Feb 2021 02:26:46 +0100 Subject: [PATCH 19/19] fix changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f6f58f63d4851..06656907ef442 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,7 +45,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). ### Fixed -- Made the `Plugin.reduce` method more consistent across all Plugins to reflect a mean-reduction by default ([#6011](https://github.com/PyTorchLightning/pytorch-lightning/pull/6011) +- Made the `Plugin.reduce` method more consistent across all Plugins to reflect a mean-reduction by default ([#6011](https://github.com/PyTorchLightning/pytorch-lightning/pull/6011)) - Move lightning module to correct device type when using LightningDistributedWrapper ([#6070](https://github.com/PyTorchLightning/pytorch-lightning/pull/6070))
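

The net effect of this series: `WandbLogger` no longer keeps a `sync_step`/step-offset bookkeeping. Instead, `log_metrics` always logs `trainer/global_step` alongside the metrics, and `experiment` registers it as the default x-axis through wandb's `define_metric` API (guarded with `getattr` for older clients that lack it). A minimal standalone sketch of the same pattern follows — it assumes a recent wandb client that ships `define_metric`; the project name and metric values are illustrative only, not taken from the patches:

    import wandb

    run = wandb.init(project="demo")  # "demo" is a placeholder project name

    # Guard for older wandb clients without `define_metric`, as the logger does.
    if getattr(run, "define_metric", None):
        # Declare the step metric, then make it the default x-axis for all metrics.
        run.define_metric("trainer/global_step")
        run.define_metric("*", step_metric="trainer/global_step", step_sync=True)

    # Each metrics dict carries its own step, so logging at a step lower than a
    # previous one (k-fold, resumed runs) no longer drops values.
    run.log({"acc": 0.90, "trainer/global_step": 10})
    run.log({"val_loss": 0.30, "trainer/global_step": 5})  # earlier step, still recorded

    run.finish()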