diff --git a/CHANGELOG.md b/CHANGELOG.md
index 99996c6281938..559475682123f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -95,6 +95,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - `Trainer.request_dataloader` now takes a `RunningStage` enum instance ([#8858](https://github.com/PyTorchLightning/pytorch-lightning/pull/8858))
 
+
 ### Deprecated
 
 - Deprecated `LightningModule.summarize()` in favor of `pytorch_lightning.utilities.model_summary.summarize()`
@@ -149,6 +150,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed reset dataloader hooks to Training Plugins and Accelerators ([#8858](https://github.com/PyTorchLightning/pytorch-lightning/pull/8858))
 
+- Removed deprecated `GradInformation` module in favor of `pytorch_lightning.utilities.grads` ([]())
+
 ### Fixed
 
diff --git a/pytorch_lightning/core/grads.py b/pytorch_lightning/core/grads.py
deleted file mode 100644
index a557687958d70..0000000000000
--- a/pytorch_lightning/core/grads.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright The PyTorch Lightning team.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Module to describe gradients. This class is deprecated in v1.3 and will be removed in v1.5
-"""
-from typing import Dict, Union
-
-from torch.nn import Module
-
-from pytorch_lightning.utilities import rank_zero_deprecation
-from pytorch_lightning.utilities.grads import grad_norm as new_grad_norm
-
-
-class GradInformation(Module):
-    def grad_norm(self, norm_type: Union[float, int, str]) -> Dict[str, float]:
-        """Compute each parameter's gradient's norm and their overall norm.
-
-        .. deprecated:: v1.3
-            Will be removed in v1.5.0. Use :func:`pytorch_lightning.utilities.grads.grad_norm` instead.
-        """
-        rank_zero_deprecation(
-            "LightningModule.grad_norm is deprecated in v1.3 and will be removed in v1.5."
-            " Use grad_norm from pytorch_lightning.utilities.grads instead."
-        )
-        return new_grad_norm(self, norm_type)
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 21a07add45eaa..fe924ed147554 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -31,7 +31,6 @@
 from torch.optim.optimizer import Optimizer
 from torchmetrics import Metric
 
-from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks
 from pytorch_lightning.core.mixins import DeviceDtypeModuleMixin, HyperparametersMixin
 from pytorch_lightning.core.optimizer import LightningOptimizer
@@ -57,7 +56,6 @@ class LightningModule(
     ABC,
     DeviceDtypeModuleMixin,
     HyperparametersMixin,
-    GradInformation,
     ModelIO,
     ModelHooks,
     DataHooks,
diff --git a/tests/deprecated_api/test_remove_1-5.py b/tests/deprecated_api/test_remove_1-5.py
index 9ff4291da59fb..f1613b97b5a1f 100644
--- a/tests/deprecated_api/test_remove_1-5.py
+++ b/tests/deprecated_api/test_remove_1-5.py
@@ -161,12 +161,6 @@ def bar(self):
                 pass
 
 
-def test_v1_5_0_lighting_module_grad_norm(tmpdir):
-    model = BoringModel()
-    with pytest.deprecated_call(match="is deprecated in v1.3 and will be removed in v1.5"):
-        model.grad_norm(2)
-
-
 def test_v1_5_0_datamodule_setter():
     model = BoringModel()
     datamodule = BoringDataModule()
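Migration note: the sketch below shows the replacement call path for code that relied on the removed `GradInformation.grad_norm` mixin method. It is a minimal, hypothetical example (the `torch.nn.Linear` model and random input are made up for illustration); only `pytorch_lightning.utilities.grads.grad_norm(module, norm_type)` is taken from the deleted code above.

```python
import torch
from pytorch_lightning.utilities.grads import grad_norm

# Any nn.Module works here; GradInformation itself subclassed torch.nn.Module,
# so an existing LightningModule can be passed in the same way.
model = torch.nn.Linear(32, 2)

# Populate gradients so there is something to measure.
loss = model(torch.randn(4, 32)).sum()
loss.backward()

# Before this PR (via the removed mixin): norms = model.grad_norm(2)
# After: pass the module explicitly to the standalone utility.
norms = grad_norm(model, 2)  # Dict[str, float] of per-parameter gradient norms plus an overall norm
print(norms)
```

Inside a `LightningModule`, the same dictionary can be produced with `grad_norm(self, 2)` and, if desired, logged via `self.log_dict(...)` from a training hook; the choice of hook and logging scheme is up to the user and is not prescribed by this PR.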