diff --git a/CHANGELOG.md b/CHANGELOG.md
index eb9fdfbc5ba06..1affe14147817 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,6 +21,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ### Fixed
 
+- Flattening Wandb Hyperparameters ([#2459](https://github.com/PyTorchLightning/pytorch-lightning/pull/2459))
+
 - Fixed using the same DDP python interpreter and actually running ([#2482](https://github.com/PyTorchLightning/pytorch-lightning/pull/2482))
 
 - Fixed model summary input type conversion for models that have input dtype different from model parameters ([#2510](https://github.com/PyTorchLightning/pytorch-lightning/pull/2510))
diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index 4e357806579f6..d0a139266ca06 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -125,6 +125,7 @@ def watch(self, model: nn.Module, log: str = 'gradients', log_freq: int = 100):
     @rank_zero_only
     def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
         params = self._convert_params(params)
+        params = self._flatten_dict(params)
         self.experiment.config.update(params, allow_val_change=True)
 
     @rank_zero_only
diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index aa8b616bcf475..182b518d1e52d 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -19,9 +19,12 @@ def test_wandb_logger(wandb):
 
     logger.log_metrics({'acc': 1.0}, step=3)
     wandb.init().log.assert_called_once_with({'global_step': 3, 'acc': 1.0})
 
-    logger.log_hyperparams({'test': None})
-    wandb.init().config.update.assert_called_once_with({'test': None}, allow_val_change=True)
-
+    logger.log_hyperparams({'test': None, 'nested': {'a': 1}, 'b': [2, 3, 4]})
+    wandb.init().config.update.assert_called_once_with(
+        {'test': 'None', 'nested/a': 1, 'b': [2, 3, 4]},
+        allow_val_change=True,
+    )
+
     logger.watch('model', 'log', 10)
     wandb.init().watch.assert_called_once_with('model', log='log', log_freq=10)