
Commit

Add warning_cache.deprecation and set warning stacklevel [1/2] (#8005)
Co-authored-by: Carlos Mocholi <carlossmocholi@gmail.com>
awaelchli and carmocca committed Jun 22, 2021
1 parent cd95ab7 commit f6d1ae7
Showing 10 changed files with 25 additions and 34 deletions.
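All ten files follow the same pattern: deprecation messages previously routed through the generic warning helpers with an explicit `DeprecationWarning` category now go through dedicated deprecation helpers. A rough before/after sketch (the message text is a placeholder, not one of the messages changed below):

```python
from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
from pytorch_lightning.utilities.warnings import WarningCache

warning_cache = WarningCache()
msg = "`old_arg` is deprecated in v1.3 and will be removed in v1.5"  # placeholder message

# Before this commit: the category is passed explicitly to the generic helpers.
warning_cache.warn(msg, DeprecationWarning)
rank_zero_warn(msg, DeprecationWarning)

# After this commit: dedicated helpers; per the commit title, the category
# (and the warning stacklevel) is handled inside them.
warning_cache.deprecation(msg)
rank_zero_deprecation(msg)
```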
4 changes: 2 additions & 2 deletions pytorch_lightning/callbacks/model_checkpoint.py
@@ -607,10 +607,10 @@ def _add_backward_monitor_support(self, trainer: 'pl.Trainer') -> None:
             self.save_top_k = 1
 
         if deprecation_warning:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "Relying on `self.log('val_loss', ...)` to set the ModelCheckpoint monitor is deprecated in v1.2"
                 " and will be removed in v1.4. Please, create your own `mc = ModelCheckpoint(monitor='your_monitor')`"
-                " and use it as `Trainer(callbacks=[mc])`.", DeprecationWarning
+                " and use it as `Trainer(callbacks=[mc])`.",
             )
 
     def _validate_monitor_key(self, trainer: 'pl.Trainer') -> None:
4 changes: 0 additions & 4 deletions pytorch_lightning/core/lightning.py
@@ -165,10 +165,6 @@ def example_input_array(self, example: Any) -> None:
 
     @property
    def datamodule(self) -> Any:
-        rank_zero_deprecation(
-            "The `LightningModule.datamodule` property is deprecated in v1.3 and will be removed in v1.5."
-            " Access the datamodule through using `self.trainer.datamodule` instead."
-        )
         return self._datamodule
 
     @datamodule.setter
4 changes: 2 additions & 2 deletions pytorch_lightning/loggers/wandb.py
@@ -115,9 +115,9 @@ def __init__(
             )
 
         if sync_step is not None:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`WandbLogger(sync_step=(True|False))` is deprecated in v1.2.1 and will be removed in v1.5."
-                " Metrics are now logged separately and automatically synchronized.", DeprecationWarning
+                " Metrics are now logged separately and automatically synchronized."
             )
 
         super().__init__()
2 changes: 1 addition & 1 deletion pytorch_lightning/plugins/training_type/deepspeed.py
@@ -30,7 +30,7 @@
 from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
-from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
+from pytorch_lightning.utilities.distributed import _warn, rank_zero_info, rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _DEEPSPEED_AVAILABLE
 
8 changes: 4 additions & 4 deletions pytorch_lightning/profiler/pytorch.py
@@ -23,8 +23,8 @@
 from torch import nn, Tensor
 from torch.autograd.profiler import record_function
 
-from pytorch_lightning.profiler.profilers import BaseProfiler
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.profiler import BaseProfiler
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE
 
@@ -349,9 +349,9 @@ def __deprecation_check(
         record_functions = set()
 
         if profiled_functions is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`PyTorchProfiler.profiled_functions` has been renamed to"
-                " `record_functions` in v1.3 and will be removed in v1.5", DeprecationWarning
+                " `record_functions` in v1.3 and will be removed in v1.5",
             )
             if not record_functions:
                 record_functions |= set(profiled_functions)
4 changes: 2 additions & 2 deletions pytorch_lightning/trainer/callback_hook.py
@@ -97,10 +97,10 @@ def on_train_epoch_end(self, outputs: EPOCH_OUTPUT):
         """
         for callback in self.callbacks:
             if is_param_in_hook_signature(callback.on_train_epoch_end, "outputs"):
-                warning_cache.warn(
+                warning_cache.deprecation(
                     "The signature of `Callback.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been removed."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5"
                 )
                 callback.on_train_epoch_end(self, self.lightning_module, outputs)
             else:
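For context, the deprecation above targets user callbacks that still accept the removed `outputs` argument; a minimal sketch of the old and new hook signatures (the class names are made up for illustration):

```python
import pytorch_lightning as pl

class OldStyleCallback(pl.Callback):
    # pre-1.3 signature: still called with `outputs`, but triggers the deprecation above
    def on_train_epoch_end(self, trainer, pl_module, outputs):
        ...

class NewStyleCallback(pl.Callback):
    # current signature: the `outputs` parameter has been removed
    def on_train_epoch_end(self, trainer, pl_module):
        ...
```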
8 changes: 4 additions & 4 deletions pytorch_lightning/trainer/training_loop.py
@@ -642,10 +642,10 @@ def _on_train_epoch_end_hook(self, processed_epoch_output) -> None:
         if is_overridden(hook_name, model_ref):
             hook_fx = getattr(model_ref, hook_name)
             if is_param_in_hook_signature(hook_fx, "outputs"):
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "The signature of `ModelHooks.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been deprecated."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5",
                 )
                 model_ref.on_train_epoch_end(processed_epoch_output)
             else:
@@ -944,10 +944,10 @@ def build_train_args(self, batch, batch_idx, opt_idx, hiddens):
         if len(self.trainer.optimizers) > 1:
             if self.trainer.has_arg("training_step", "optimizer_idx"):
                 if not self.trainer.lightning_module.automatic_optimization:
-                    self.warning_cache.warn(
+                    self.warning_cache.deprecation(
                         "`training_step` hook signature has changed in v1.3."
                         " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                        " the old signature will be removed in v1.5", DeprecationWarning
+                        " the old signature will be removed in v1.5",
                     )
                 args.append(opt_idx)
             elif not self.trainer.has_arg(
5 changes: 2 additions & 3 deletions pytorch_lightning/utilities/device_parser.py
@@ -16,7 +16,7 @@
 
 import torch
 
-from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_warn
+from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_deprecation
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _compare_version
 
@@ -121,12 +121,11 @@ def _normalize_parse_gpu_string_input(s: Union[int, str, List[int]]) -> Union[in
     else:
         num_gpus = int(s.strip())
         if _compare_version("pytorch_lightning", operator.lt, "1.5"):
-            rank_zero_warn(
+            rank_zero_deprecation(
                 f"Parsing of the Trainer argument gpus='{s}' (string) will change in the future."
                 " In the current version of Lightning, this will select"
                 f" CUDA device with index {num_gpus}, but from v1.5 it will select gpus"
                 f" {list(range(num_gpus))} (same as gpus={s} (int)).",
-                DeprecationWarning,
             )
             return [num_gpus]
     return num_gpus
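A small sketch of the behaviour change that this deprecation message describes (simplified for illustration, not the actual Lightning parsing code):

```python
def parse_gpus_string(s: str):
    num_gpus = int(s.strip())
    current = [num_gpus]             # pre-1.5: gpus='3' selects the CUDA device with index 3
    planned = list(range(num_gpus))  # from v1.5: gpus='3' behaves like gpus=3, i.e. [0, 1, 2]
    return current, planned

print(parse_gpus_string("3"))  # ([3], [0, 1, 2])
```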
11 changes: 8 additions & 3 deletions pytorch_lightning/utilities/warnings.py
@@ -11,18 +11,23 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 
 
 class WarningCache:
 
     def __init__(self):
         self.warnings = set()
 
+    def clear(self):
+        self.warnings.clear()
+
     def warn(self, m, *args, **kwargs):
         if m not in self.warnings:
             self.warnings.add(m)
             rank_zero_warn(m, *args, **kwargs)
 
-    def clear(self):
-        self.warnings.clear()
+    def deprecation(self, m, *args, **kwargs):
+        if m not in self.warnings:
+            self.warnings.add(m)
+            rank_zero_deprecation(m, *args, **kwargs)
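The cache deduplicates by message, so each deprecation string is forwarded to `rank_zero_deprecation` at most once until the cache is cleared; a small usage sketch (the message is a placeholder):

```python
from pytorch_lightning.utilities.warnings import WarningCache

cache = WarningCache()

# Only the first call emits the warning; identical messages are swallowed afterwards.
for _ in range(3):
    cache.deprecation("`old_hook` is deprecated in v1.3 and will be removed in v1.5")

cache.clear()  # forget previously emitted messages so they can be raised again
```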
9 changes: 0 additions & 9 deletions tests/deprecated_api/test_remove_1-5.py
@@ -362,15 +362,6 @@ def test_v1_5_0_trainer_gpus_str_parsing(*_):
     assert gpus == [0]
 
 
-def test_v1_5_0_datamodule_setter():
-    model = BoringModel()
-    datamodule = BoringDataModule()
-    with no_deprecated_call(match="The `LightningModule.datamodule`"):
-        model.datamodule = datamodule
-    with pytest.deprecated_call(match="The `LightningModule.datamodule`"):
-        _ = model.datamodule
-
-
 def test_v1_5_0_trainer_tbptt_steps(tmpdir):
     with pytest.deprecated_call(match="is deprecated in v1.3 and will be removed in v1.5"):
         _ = Trainer(truncated_bptt_steps=1)
