Add warning_cache.deprecation and set warning stacklevel [1/2] #8005

Merged · 3 commits · Jun 18, 2021
4 changes: 2 additions & 2 deletions pytorch_lightning/callbacks/model_checkpoint.py
@@ -650,10 +650,10 @@ def _add_backward_monitor_support(self, trainer: 'pl.Trainer') -> None:
             self.save_top_k = 1
 
         if deprecation_warning:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "Relying on `self.log('val_loss', ...)` to set the ModelCheckpoint monitor is deprecated in v1.2"
                 " and will be removed in v1.4. Please, create your own `mc = ModelCheckpoint(monitor='your_monitor')`"
-                " and use it as `Trainer(callbacks=[mc])`.", DeprecationWarning
+                " and use it as `Trainer(callbacks=[mc])`.",
             )
 
     def _validate_monitor_key(self, trainer: 'pl.Trainer') -> None:
9 changes: 5 additions & 4 deletions pytorch_lightning/core/lightning.py
@@ -168,9 +168,10 @@ def example_input_array(self, example: Any) -> None:
 
     @property
     def datamodule(self) -> Any:
-        rank_zero_deprecation(
+        warning_cache.deprecation(
             "The `LightningModule.datamodule` property is deprecated in v1.3 and will be removed in v1.5."
-            " Access the datamodule through using `self.trainer.datamodule` instead."
+            " Access the datamodule through using `self.trainer.datamodule` instead.",
+            stacklevel=5,
         )
         return self._datamodule
 
@@ -223,10 +224,10 @@ def _apply_batch_transfer_handler(
         if is_param_in_hook_signature(self.transfer_batch_to_device, 'dataloader_idx'):
             batch = self.transfer_batch_to_device(batch, device, dataloader_idx)
         else:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`transfer_batch_to_device` hook signature has changed in v1.4."
                 " `dataloader_idx` parameter has been added to it. Support for"
-                " the old signature will be removed in v1.6", DeprecationWarning
+                " the old signature will be removed in v1.6"
             )
             batch = self.transfer_batch_to_device(batch, device)
 
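Note on the new `stacklevel=5` argument above: `warnings.warn` attributes the message to a frame `stacklevel` calls up the stack, so a deprecation raised from deep inside a property that goes through the rank-zero helpers needs a higher value before the reported location points at the user's own `model.datamodule` access. A minimal sketch of the mechanism, using the plain `warnings` module rather than Lightning's helpers (the function names here are illustrative only):

```python
import warnings


def _deprecation(message: str, stacklevel: int = 2) -> None:
    # stacklevel=1 would blame this very line; each extra level walks one
    # frame further up the call stack toward the user's code.
    warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)


def old_api() -> None:
    # Two frames sit between the user and warnings.warn (old_api and
    # _deprecation), so stacklevel=3 attributes the warning to whoever
    # called old_api().
    _deprecation("`old_api` is deprecated, use `new_api` instead", stacklevel=3)


old_api()  # the emitted DeprecationWarning points at this line
```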
4 changes: 2 additions & 2 deletions pytorch_lightning/loggers/wandb.py
@@ -137,9 +137,9 @@ def __init__(
             )
 
         if sync_step is not None:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`WandbLogger(sync_step=(True|False))` is deprecated in v1.2.1 and will be removed in v1.5."
-                " Metrics are now logged separately and automatically synchronized.", DeprecationWarning
+                " Metrics are now logged separately and automatically synchronized."
             )
 
         super().__init__()
8 changes: 4 additions & 4 deletions pytorch_lightning/loops/training_batch_loop.py
@@ -488,10 +488,10 @@ def build_train_args(self, batch: Any, batch_idx: int, opt_idx: int, hiddens: Te
         if len(self.trainer.optimizers) > 1:
             if self.trainer.has_arg("training_step", "optimizer_idx"):
                 if not self.trainer.lightning_module.automatic_optimization:
-                    self.warning_cache.warn(
+                    self.warning_cache.deprecation(
                         "`training_step` hook signature has changed in v1.3."
                         " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                        " the old signature will be removed in v1.5", DeprecationWarning
+                        " the old signature will be removed in v1.5",
                     )
                 args.append(opt_idx)
             elif not self.trainer.has_arg(
@@ -682,10 +682,10 @@ def _build_kwargs(self, batch: Any, batch_idx: int, opt_idx: int, hiddens: Optio
         has_opt_idx_in_train_step = is_param_in_hook_signature(training_step_fx, "optimizer_idx")
         if has_opt_idx_in_train_step:
             if not lightning_module.automatic_optimization:
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "`training_step` hook signature has changed in v1.3."
                     " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                    " the old signature will be removed in v1.5", DeprecationWarning
+                    " the old signature will be removed in v1.5",
                 )
             step_kwargs['optimizer_idx'] = opt_idx
         elif not has_opt_idx_in_train_step and lightning_module.automatic_optimization:
4 changes: 2 additions & 2 deletions pytorch_lightning/loops/training_epoch_loop.py
@@ -231,10 +231,10 @@ def _on_train_epoch_end_hook(self, processed_epoch_output: List[List[STEP_OUTPUT
         if is_overridden(hook_name, model_ref):
             hook_fx = getattr(model_ref, hook_name)
             if is_param_in_hook_signature(hook_fx, "outputs"):
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "The signature of `ModelHooks.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been deprecated."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5",
                 )
                 model_ref.on_train_epoch_end(processed_epoch_output)
             else:
8 changes: 4 additions & 4 deletions pytorch_lightning/plugins/training_type/deepspeed.py
@@ -15,7 +15,6 @@
 import json
 import logging
 import os
-import warnings
 from collections import OrderedDict
 from pathlib import Path
 from typing import Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Union
@@ -30,7 +29,7 @@
 from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
-from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
+from pytorch_lightning.utilities.distributed import _warn, rank_zero_info, rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _DEEPSPEED_AVAILABLE
 
@@ -260,10 +259,11 @@ def __init__(
             )
 
         if cpu_offload or cpu_offload_params or cpu_offload_use_pin_memory:
-            warnings.warn(
+            _warn(
                 "The usage of `cpu_offload`, `cpu_offload_params`, and `cpu_offload_use_pin_memory` "
                 "is deprecated since v1.4 and will be removed in v1.5."
-                " From now on use `offload_optimizer`, `offload_parameters` and `pin_memory`.", DeprecationWarning
+                " From now on use `offload_optimizer`, `offload_parameters` and `pin_memory`.",
+                category=DeprecationWarning
             )
             offload_optimizer = cpu_offload
             offload_parameters = cpu_offload_params
5 changes: 2 additions & 3 deletions pytorch_lightning/profiler/base.py
@@ -19,7 +19,7 @@
 from pathlib import Path
 from typing import Any, Callable, Dict, Optional, TextIO, Union
 
-from pytorch_lightning.utilities import rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_deprecation
 from pytorch_lightning.utilities.cloud_io import get_filesystem
 
 log = logging.getLogger(__name__)
@@ -63,10 +63,9 @@ def __init__(
         self.dirpath = dirpath
         self.filename = filename
         if output_filename is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`Profiler` signature has changed in v1.3. The `output_filename` parameter has been removed in"
                 " favor of `dirpath` and `filename`. Support for the old signature will be removed in v1.5",
-                DeprecationWarning
             )
             filepath = Path(output_filename)
             self.dirpath = filepath.parent
6 changes: 3 additions & 3 deletions pytorch_lightning/profiler/pytorch.py
@@ -24,7 +24,7 @@
 from torch.autograd.profiler import record_function
 
 from pytorch_lightning.profiler.base import BaseProfiler
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE
 
@@ -349,9 +349,9 @@ def __deprecation_check(
         record_functions = set()
 
         if profiled_functions is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`PyTorchProfiler.profiled_functions` has been renamed to"
-                " `record_functions` in v1.3 and will be removed in v1.5", DeprecationWarning
+                " `record_functions` in v1.3 and will be removed in v1.5",
             )
             if not record_functions:
                 record_functions |= set(profiled_functions)
4 changes: 2 additions & 2 deletions pytorch_lightning/trainer/callback_hook.py
@@ -97,10 +97,10 @@ def on_train_epoch_end(self, outputs: EPOCH_OUTPUT):
         """
         for callback in self.callbacks:
             if is_param_in_hook_signature(callback.on_train_epoch_end, "outputs"):
-                warning_cache.warn(
+                warning_cache.deprecation(
                     "The signature of `Callback.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been removed."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5"
                 )
                 callback.on_train_epoch_end(self, self.lightning_module, outputs)
             else:
5 changes: 2 additions & 3 deletions pytorch_lightning/utilities/device_parser.py
@@ -16,7 +16,7 @@
 
 import torch
 
-from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_warn
+from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_deprecation
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _compare_version
 
@@ -121,12 +121,11 @@ def _normalize_parse_gpu_string_input(s: Union[int, str, List[int]]) -> Union[in
         else:
             num_gpus = int(s.strip())
             if _compare_version("pytorch_lightning", operator.lt, "1.5"):
-                rank_zero_warn(
+                rank_zero_deprecation(
                     f"Parsing of the Trainer argument gpus='{s}' (string) will change in the future."
                     " In the current version of Lightning, this will select"
                     f" CUDA device with index {num_gpus}, but from v1.5 it will select gpus"
                     f" {list(range(num_gpus))} (same as gpus={s} (int)).",
-                    DeprecationWarning,
                 )
                 return [num_gpus]
             return num_gpus
7 changes: 6 additions & 1 deletion pytorch_lightning/utilities/warnings.py
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 
 
 class WarningCache(set):
@@ -20,3 +20,8 @@ def warn(self, m, *args, **kwargs):
         if m not in self:
             self.add(m)
             rank_zero_warn(m, *args, **kwargs)
+
+    def deprecation(self, m, *args, **kwargs):
+        if m not in self:
+            self.add(m)
+            rank_zero_deprecation(m, *args, **kwargs)
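The `WarningCache` change above is the core of the PR: the cache is a `set` of already-emitted messages, and the new `deprecation` method reuses that dedup logic while routing through `rank_zero_deprecation`, so call sites no longer pass `DeprecationWarning` themselves. A self-contained sketch of the same pattern, with plain `warnings.warn` standing in for the rank-zero helpers (the example messages are illustrative only):

```python
import warnings


class WarningCache(set):
    """Emit each distinct message at most once per process."""

    def warn(self, m, *args, **kwargs):
        if m not in self:
            self.add(m)
            warnings.warn(m, *args, **kwargs)

    def deprecation(self, m, *args, **kwargs):
        # Same dedup as `warn`, but the category is always DeprecationWarning.
        if m not in self:
            self.add(m)
            warnings.warn(m, DeprecationWarning, *args, **kwargs)


cache = WarningCache()
cache.deprecation("`sync_step` is deprecated in v1.2.1 and will be removed in v1.5")
cache.deprecation("`sync_step` is deprecated in v1.2.1 and will be removed in v1.5")  # deduplicated, not emitted again
```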
6 changes: 4 additions & 2 deletions tests/deprecated_api/test_remove_1-5.py
@@ -369,8 +369,10 @@ def test_v1_5_0_datamodule_setter():
     datamodule = BoringDataModule()
     with no_deprecated_call(match="The `LightningModule.datamodule`"):
         model.datamodule = datamodule
-    with pytest.deprecated_call(match="The `LightningModule.datamodule`"):
-        _ = model.datamodule
+    from pytorch_lightning.core.lightning import warning_cache
+    warning_cache.clear()
+    _ = model.datamodule
+    assert any("The `LightningModule.datamodule`" in w for w in warning_cache)
 
 
 def test_v1_5_0_trainer_tbptt_steps(tmpdir):