diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index 067ebfdeafbe7..0d1132f191652 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -650,10 +650,10 @@ def _add_backward_monitor_support(self, trainer: 'pl.Trainer') -> None:
             self.save_top_k = 1
 
         if deprecation_warning:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "Relying on `self.log('val_loss', ...)` to set the ModelCheckpoint monitor is deprecated in v1.2"
                 " and will be removed in v1.4. Please, create your own `mc = ModelCheckpoint(monitor='your_monitor')`"
-                " and use it as `Trainer(callbacks=[mc])`.", DeprecationWarning
+                " and use it as `Trainer(callbacks=[mc])`.",
             )
 
     def _validate_monitor_key(self, trainer: 'pl.Trainer') -> None:
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index a1b2ce3a5e8f3..73b9ed3f7e9ab 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -168,9 +168,10 @@ def example_input_array(self, example: Any) -> None:
 
     @property
     def datamodule(self) -> Any:
-        rank_zero_deprecation(
+        warning_cache.deprecation(
             "The `LightningModule.datamodule` property is deprecated in v1.3 and will be removed in v1.5."
-            " Access the datamodule through using `self.trainer.datamodule` instead."
+            " Access the datamodule through using `self.trainer.datamodule` instead.",
+            stacklevel=5,
         )
         return self._datamodule
 
@@ -223,10 +224,10 @@ def _apply_batch_transfer_handler(
         if is_param_in_hook_signature(self.transfer_batch_to_device, 'dataloader_idx'):
             batch = self.transfer_batch_to_device(batch, device, dataloader_idx)
         else:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`transfer_batch_to_device` hook signature has changed in v1.4."
                 " `dataloader_idx` parameter has been added to it. Support for"
-                " the old signature will be removed in v1.6", DeprecationWarning
+                " the old signature will be removed in v1.6"
             )
             batch = self.transfer_batch_to_device(batch, device)
 
diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index c127fa037ed6b..5daf2176f3421 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -137,9 +137,9 @@ def __init__(
             )
 
         if sync_step is not None:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`WandbLogger(sync_step=(True|False))` is deprecated in v1.2.1 and will be removed in v1.5."
-                " Metrics are now logged separately and automatically synchronized.", DeprecationWarning
+                " Metrics are now logged separately and automatically synchronized."
             )
 
         super().__init__()
diff --git a/pytorch_lightning/loops/training_batch_loop.py b/pytorch_lightning/loops/training_batch_loop.py
index b581c6c8c1384..22af4d741d5a9 100644
--- a/pytorch_lightning/loops/training_batch_loop.py
+++ b/pytorch_lightning/loops/training_batch_loop.py
@@ -488,10 +488,10 @@ def build_train_args(self, batch: Any, batch_idx: int, opt_idx: int, hiddens: Te
         if len(self.trainer.optimizers) > 1:
             if self.trainer.has_arg("training_step", "optimizer_idx"):
                 if not self.trainer.lightning_module.automatic_optimization:
-                    self.warning_cache.warn(
+                    self.warning_cache.deprecation(
                         "`training_step` hook signature has changed in v1.3."
                         " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                        " the old signature will be removed in v1.5", DeprecationWarning
+                        " the old signature will be removed in v1.5",
                     )
                 args.append(opt_idx)
             elif not self.trainer.has_arg(
@@ -682,10 +682,10 @@ def _build_kwargs(self, batch: Any, batch_idx: int, opt_idx: int, hiddens: Optio
         has_opt_idx_in_train_step = is_param_in_hook_signature(training_step_fx, "optimizer_idx")
         if has_opt_idx_in_train_step:
             if not lightning_module.automatic_optimization:
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "`training_step` hook signature has changed in v1.3."
                     " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                    " the old signature will be removed in v1.5", DeprecationWarning
+                    " the old signature will be removed in v1.5",
                 )
             step_kwargs['optimizer_idx'] = opt_idx
         elif not has_opt_idx_in_train_step and lightning_module.automatic_optimization:
diff --git a/pytorch_lightning/loops/training_epoch_loop.py b/pytorch_lightning/loops/training_epoch_loop.py
index d029c525d71ac..67fc2e2a6f72c 100644
--- a/pytorch_lightning/loops/training_epoch_loop.py
+++ b/pytorch_lightning/loops/training_epoch_loop.py
@@ -231,10 +231,10 @@ def _on_train_epoch_end_hook(self, processed_epoch_output: List[List[STEP_OUTPUT
         if is_overridden(hook_name, model_ref):
             hook_fx = getattr(model_ref, hook_name)
             if is_param_in_hook_signature(hook_fx, "outputs"):
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "The signature of `ModelHooks.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been deprecated."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5",
                 )
                 model_ref.on_train_epoch_end(processed_epoch_output)
             else:
diff --git a/pytorch_lightning/plugins/training_type/deepspeed.py b/pytorch_lightning/plugins/training_type/deepspeed.py
index 8f613081cdfe2..86510fbed6fe2 100644
--- a/pytorch_lightning/plugins/training_type/deepspeed.py
+++ b/pytorch_lightning/plugins/training_type/deepspeed.py
@@ -15,7 +15,6 @@
 import json
 import logging
 import os
-import warnings
 from collections import OrderedDict
 from pathlib import Path
 from typing import Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Union
@@ -30,7 +29,7 @@
 from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
-from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
+from pytorch_lightning.utilities.distributed import _warn, rank_zero_info, rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _DEEPSPEED_AVAILABLE
 
@@ -260,10 +259,11 @@ def __init__(
             )
 
         if cpu_offload or cpu_offload_params or cpu_offload_use_pin_memory:
-            warnings.warn(
+            _warn(
                 "The usage of `cpu_offload`, `cpu_offload_params`, and `cpu_offload_use_pin_memory` "
                 "is deprecated since v1.4 and will be removed in v1.5."
-                " From now on use `offload_optimizer`, `offload_parameters` and `pin_memory`.", DeprecationWarning
+                " From now on use `offload_optimizer`, `offload_parameters` and `pin_memory`.",
+                category=DeprecationWarning
             )
             offload_optimizer = cpu_offload
             offload_parameters = cpu_offload_params
diff --git a/pytorch_lightning/profiler/base.py b/pytorch_lightning/profiler/base.py
index 2a064085e8da7..d327d34e8d8c6 100644
--- a/pytorch_lightning/profiler/base.py
+++ b/pytorch_lightning/profiler/base.py
@@ -19,7 +19,7 @@
 from pathlib import Path
 from typing import Any, Callable, Dict, Optional, TextIO, Union
 
-from pytorch_lightning.utilities import rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_deprecation
 from pytorch_lightning.utilities.cloud_io import get_filesystem
 
 log = logging.getLogger(__name__)
@@ -63,10 +63,9 @@ def __init__(
         self.dirpath = dirpath
         self.filename = filename
         if output_filename is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`Profiler` signature has changed in v1.3. The `output_filename` parameter has been removed in"
                 " favor of `dirpath` and `filename`. Support for the old signature will be removed in v1.5",
-                DeprecationWarning
             )
             filepath = Path(output_filename)
             self.dirpath = filepath.parent
diff --git a/pytorch_lightning/profiler/pytorch.py b/pytorch_lightning/profiler/pytorch.py
index b78922d7f4a47..e04f8800636ee 100644
--- a/pytorch_lightning/profiler/pytorch.py
+++ b/pytorch_lightning/profiler/pytorch.py
@@ -24,7 +24,7 @@
 from torch.autograd.profiler import record_function
 
 from pytorch_lightning.profiler.base import BaseProfiler
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE
 
@@ -349,9 +349,9 @@ def __deprecation_check(
             record_functions = set()
 
         if profiled_functions is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`PyTorchProfiler.profiled_functions` has been renamed to"
-                " `record_functions` in v1.3 and will be removed in v1.5", DeprecationWarning
+                " `record_functions` in v1.3 and will be removed in v1.5",
             )
             if not record_functions:
                 record_functions |= set(profiled_functions)
diff --git a/pytorch_lightning/trainer/callback_hook.py b/pytorch_lightning/trainer/callback_hook.py
index 3b5b4d403831b..288f6b0f8cd0c 100644
--- a/pytorch_lightning/trainer/callback_hook.py
+++ b/pytorch_lightning/trainer/callback_hook.py
@@ -97,10 +97,10 @@ def on_train_epoch_end(self, outputs: EPOCH_OUTPUT):
         """
         for callback in self.callbacks:
             if is_param_in_hook_signature(callback.on_train_epoch_end, "outputs"):
-                warning_cache.warn(
+                warning_cache.deprecation(
                     "The signature of `Callback.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been removed."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5"
                 )
                 callback.on_train_epoch_end(self, self.lightning_module, outputs)
             else:
diff --git a/pytorch_lightning/utilities/device_parser.py b/pytorch_lightning/utilities/device_parser.py
index 511a91326953d..ecb5d6ac00a03 100644
--- a/pytorch_lightning/utilities/device_parser.py
+++ b/pytorch_lightning/utilities/device_parser.py
@@ -16,7 +16,7 @@
 
 import torch
 
-from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_warn
+from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_deprecation
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _compare_version
 
@@ -121,12 +121,11 @@ def _normalize_parse_gpu_string_input(s: Union[int, str, List[int]]) -> Union[in
     else:
         num_gpus = int(s.strip())
         if _compare_version("pytorch_lightning", operator.lt, "1.5"):
-            rank_zero_warn(
+            rank_zero_deprecation(
                 f"Parsing of the Trainer argument gpus='{s}' (string) will change in the future."
                 " In the current version of Lightning, this will select"
                 f" CUDA device with index {num_gpus}, but from v1.5 it will select gpus"
                 f" {list(range(num_gpus))} (same as gpus={s} (int)).",
-                DeprecationWarning,
             )
             return [num_gpus]
     return num_gpus
diff --git a/pytorch_lightning/utilities/warnings.py b/pytorch_lightning/utilities/warnings.py
index 4ac6b2b4cbb54..7017ef5c3100c 100644
--- a/pytorch_lightning/utilities/warnings.py
+++ b/pytorch_lightning/utilities/warnings.py
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 
 
 class WarningCache(set):
@@ -20,3 +20,8 @@ def warn(self, m, *args, **kwargs):
         if m not in self:
             self.add(m)
             rank_zero_warn(m, *args, **kwargs)
+
+    def deprecation(self, m, *args, **kwargs):
+        if m not in self:
+            self.add(m)
+            rank_zero_deprecation(m, *args, **kwargs)
diff --git a/tests/deprecated_api/test_remove_1-5.py b/tests/deprecated_api/test_remove_1-5.py
index 62020b62a4768..d4df7f2e65034 100644
--- a/tests/deprecated_api/test_remove_1-5.py
+++ b/tests/deprecated_api/test_remove_1-5.py
@@ -369,8 +369,10 @@ def test_v1_5_0_datamodule_setter():
     datamodule = BoringDataModule()
     with no_deprecated_call(match="The `LightningModule.datamodule`"):
         model.datamodule = datamodule
-    with pytest.deprecated_call(match="The `LightningModule.datamodule`"):
-        _ = model.datamodule
+    from pytorch_lightning.core.lightning import warning_cache
+    warning_cache.clear()
+    _ = model.datamodule
+    assert any("The `LightningModule.datamodule`" in w for w in warning_cache)
 
 
 def test_v1_5_0_trainer_tbptt_steps(tmpdir):