Remove unused post_optimizer_step (#9746)
carmocca authored Sep 29, 2021
1 parent 454b4f7 commit 9ebfbbc
Showing 4 changed files with 3 additions and 9 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -343,6 +343,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed `TrainingTypePlugin.on_save` and `Accelerator.on_save` ([#9023](https://github.com/PyTorchLightning/pytorch-lightning/pull/9023))
 
 
+- Removed `{Accelerator,TrainingTypePlugin,PrecisionPlugin}.post_optimizer_step` ([#9746](https://github.com/PyTorchLightning/pytorch-lightning/pull/9746))
+
+
 - Removed deprecated `connect_precision_plugin` and `connect_training_type_plugin` from `Accelerator` ([#9019](https://github.com/PyTorchLightning/pytorch-lightning/pull/9019))
 
 
2 changes: 0 additions & 2 deletions pytorch_lightning/accelerators/accelerator.py
@@ -258,8 +258,6 @@ def optimizer_step(self, optimizer: Optimizer, opt_idx: int, lambda_closure: Cal
         )
         if make_optimizer_step:
             self.run_optimizer_step(optimizer, opt_idx, lambda_closure, **kwargs)
-        self.precision_plugin.post_optimizer_step(optimizer, opt_idx)
-        self.training_type_plugin.post_optimizer_step(optimizer, opt_idx, **kwargs)
 
     def run_optimizer_step(
         self, optimizer: Optimizer, optimizer_idx: int, lambda_closure: Callable, **kwargs: Any
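For context, here is a minimal, self-contained sketch of the control flow that remains in `Accelerator.optimizer_step` after this change: the precision plugin's `pre_optimizer_step` gates whether the step runs, and no `post_optimizer_step` hook is invoked on either plugin afterwards. This is not the actual Lightning source; the underscore-prefixed classes and their method signatures are illustrative stand-ins only.

from typing import Any, Callable


class _PrecisionPluginSketch:
    def pre_optimizer_step(self, optimizer: Any, opt_idx: int) -> bool:
        # In Lightning this can return False (e.g. when the step should be
        # skipped); this stand-in always allows the step.
        return True


class _AcceleratorSketch:
    def __init__(self) -> None:
        self.precision_plugin = _PrecisionPluginSketch()

    def optimizer_step(self, optimizer: Any, opt_idx: int, lambda_closure: Callable, **kwargs: Any) -> None:
        # Ask the precision plugin whether the optimizer step should run.
        make_optimizer_step = self.precision_plugin.pre_optimizer_step(optimizer, opt_idx)
        if make_optimizer_step:
            self.run_optimizer_step(optimizer, opt_idx, lambda_closure, **kwargs)
        # Before this commit, empty post_optimizer_step hooks on the precision
        # and training-type plugins were called here; those calls are removed.

    def run_optimizer_step(self, optimizer: Any, opt_idx: int, lambda_closure: Callable, **kwargs: Any) -> None:
        # In this sketch, simply delegate to the optimizer itself.
        optimizer.step(closure=lambda_closure)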
3 changes: 0 additions & 3 deletions pytorch_lightning/plugins/precision/precision_plugin.py
@@ -102,9 +102,6 @@ def pre_optimizer_step(
         model.trainer.call_hook("on_before_optimizer_step", optimizer, optimizer_idx)
         return True
 
-    def post_optimizer_step(self, optimizer: Optimizer, optimizer_idx: int) -> None:
-        """Hook to do something after each optimizer step."""
-
     def clip_gradients(
         self,
         optimizer: Optimizer,
4 changes: 0 additions & 4 deletions pytorch_lightning/plugins/training_type/training_type_plugin.py
@@ -18,7 +18,6 @@
 import torch
 from torch import Tensor
 from torch.nn import Module
-from torch.optim import Optimizer
 from torch.utils.data import DataLoader
 
 import pytorch_lightning as pl
@@ -141,9 +140,6 @@ def pre_backward(self, closure_loss: torch.Tensor) -> None:
     def post_backward(self, closure_loss: torch.Tensor) -> None:
         """Run after precision plugin executes backward."""
 
-    def post_optimizer_step(self, optimizer: Optimizer, optimizer_idx: int, **kwargs) -> None:
-        """Hook to do something after each optimizer step."""
-
     @property
     def model(self) -> Optional[Module]:
         """Returns the potentially wrapped LightningModule."""
