Remove unused post_optimizer_step
carmocca committed Sep 29, 2021
1 parent 019e69f commit 90ed4d0
Showing 3 changed files with 0 additions and 9 deletions.
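
The removed `post_optimizer_step` hooks were no-op placeholders (their bodies consist only of the docstring "Hook to do something after each optimizer step"), so the commit deletes both the call sites in `Accelerator.optimizer_step` and the hook definitions on the precision and training type plugin base classes.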
2 changes: 0 additions & 2 deletions pytorch_lightning/accelerators/accelerator.py
@@ -258,8 +258,6 @@ def optimizer_step(self, optimizer: Optimizer, opt_idx: int, lambda_closure: Callable
             )
         if make_optimizer_step:
             self.run_optimizer_step(optimizer, opt_idx, lambda_closure, **kwargs)
-        self.precision_plugin.post_optimizer_step(optimizer, opt_idx)
-        self.training_type_plugin.post_optimizer_step(optimizer, opt_idx, **kwargs)
 
     def run_optimizer_step(
         self, optimizer: Optimizer, optimizer_idx: int, lambda_closure: Callable, **kwargs: Any
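
For orientation, here is a minimal sketch (paraphrased, not the verbatim Lightning source) of the flow that remains in `Accelerator.optimizer_step` after this change: the precision plugin's `pre_optimizer_step` decides whether the step runs, and no plugin hook is dispatched afterwards.

def optimizer_step(self, optimizer, opt_idx, lambda_closure, **kwargs):
    # Using `self.lightning_module` as the model argument is an assumption of
    # this sketch; the diff above only shows the tail of the method.
    make_optimizer_step = self.precision_plugin.pre_optimizer_step(
        self.lightning_module, optimizer, opt_idx, lambda_closure, **kwargs
    )
    if make_optimizer_step:
        self.run_optimizer_step(optimizer, opt_idx, lambda_closure, **kwargs)
    # The two post_optimizer_step calls removed above used to run here; both
    # targets were no-ops, so behavior is unchanged.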
3 changes: 0 additions & 3 deletions pytorch_lightning/plugins/precision/precision_plugin.py
@@ -102,9 +102,6 @@ def pre_optimizer_step(
         model.trainer.call_hook("on_before_optimizer_step", optimizer, optimizer_idx)
         return True
 
-    def post_optimizer_step(self, optimizer: Optimizer, optimizer_idx: int) -> None:
-        """Hook to do something after each optimizer step."""
-
     def clip_gradients(
         self,
         optimizer: Optimizer,
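
The `on_before_optimizer_step` trainer hook dispatched in `pre_optimizer_step` (see the context lines above) remains the user-facing way to run code around the step. A minimal sketch, assuming an ordinary `LightningModule` subclass (illustrative fragment, not part of this commit; `MyModel` is a placeholder name):

import pytorch_lightning as pl

class MyModel(pl.LightningModule):
    def on_before_optimizer_step(self, optimizer, optimizer_idx):
        # Dispatched via model.trainer.call_hook(...) just before the
        # optimizer step, as pre_optimizer_step above shows.
        grads = [p.grad for p in self.parameters() if p.grad is not None]
        print(f"parameters with gradients before step {optimizer_idx}: {len(grads)}")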
4 changes: 0 additions & 4 deletions pytorch_lightning/plugins/training_type/training_type_plugin.py
@@ -18,7 +18,6 @@
 import torch
 from torch import Tensor
 from torch.nn import Module
-from torch.optim import Optimizer
 from torch.utils.data import DataLoader
 
 import pytorch_lightning as pl
@@ -141,9 +140,6 @@ def pre_backward(self, closure_loss: torch.Tensor) -> None:
     def post_backward(self, closure_loss: torch.Tensor) -> None:
         """Run after precision plugin executes backward."""
 
-    def post_optimizer_step(self, optimizer: Optimizer, optimizer_idx: int, **kwargs) -> None:
-        """Hook to do something after each optimizer step."""
-
     @property
     def model(self) -> Optional[Module]:
         """Returns the potentially wrapped LightningModule."""
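
With `post_optimizer_step` gone, `Optimizer` is no longer referenced anywhere in this module, which is why its import is deleted in the same commit. The `pre_backward`/`post_backward` pair remains on the base class; a minimal sketch of overriding them (the `VerbosePlugin` name is hypothetical, the import assumes the public `pytorch_lightning.plugins` namespace, and a usable plugin must also implement the base class's abstract device and collective methods):

import torch
from pytorch_lightning.plugins import TrainingTypePlugin

class VerbosePlugin(TrainingTypePlugin):
    def pre_backward(self, closure_loss: torch.Tensor) -> None:
        # Runs before the precision plugin executes backward.
        print(f"loss entering backward: {closure_loss.detach().item():.4f}")

    def post_backward(self, closure_loss: torch.Tensor) -> None:
        # Runs after the precision plugin executes backward.
        print("backward finished")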
