Merge regression with LR schedulers
mittagessen committed Dec 3, 2023
1 parent ba4c81f, commit baff962
Showing 1 changed file with 2 additions and 2 deletions.
kraken/lib/train.py (2 additions, 2 deletions)
@@ -665,7 +665,7 @@ def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_closure):
         for pg in optimizer.param_groups:
             pg["lr"] = lr_scale * self.hparams.hyper_params['lrate']
 
-    def lr_scheduler_step(self, scheduler, optimizer_idx, metric):
+    def lr_scheduler_step(self, scheduler, metric):
         if not self.hparams.hyper_params['warmup'] or self.trainer.global_step >= self.hparams.hyper_params['warmup']:
             # step OneCycleLR each batch if not in warmup phase
             if isinstance(scheduler, lr_scheduler.OneCycleLR):
@@ -1080,7 +1080,7 @@ def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_closure):
         for pg in optimizer.param_groups:
             pg["lr"] = lr_scale * self.hparams.hyper_params['lrate']
 
-    def lr_scheduler_step(self, scheduler, optimizer_idx, metric):
+    def lr_scheduler_step(self, scheduler, metric):
         if not self.hparams.hyper_params['warmup'] or self.trainer.global_step >= self.hparams.hyper_params['warmup']:
             # step OneCycleLR each batch if not in warmup phase
             if isinstance(scheduler, lr_scheduler.OneCycleLR):
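
Both hunks make the same one-line fix: the lr_scheduler_step override drops its optimizer_idx parameter. PyTorch Lightning 2.0 removed that argument and now invokes the hook as lr_scheduler_step(scheduler, metric), so the stale three-argument override (reintroduced by a merge, per the commit message) fails with a TypeError as soon as the trainer steps a scheduler. The two identical hunks patch the same override on the two LightningModule subclasses defined in kraken/lib/train.py.

Below is a minimal sketch of the corrected hook in context, assuming pytorch-lightning >= 2.0; the module, optimizer, and scheduler settings are illustrative stand-ins, not kraken's:

import torch
import pytorch_lightning as pl
from torch import nn
from torch.optim import lr_scheduler


class SchedulerSketch(pl.LightningModule):
    """Illustrative module; only lr_scheduler_step mirrors the commit."""

    def __init__(self):
        super().__init__()
        self.layer = nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.parameters(), lr=1e-3)
        # 'interval': 'step' routes control through lr_scheduler_step
        # on every batch rather than once per epoch.
        scheduler = lr_scheduler.OneCycleLR(optimizer, max_lr=1e-2,
                                            total_steps=100)
        return [optimizer], [{'scheduler': scheduler, 'interval': 'step'}]

    # Lightning >= 2.0 calls this hook as lr_scheduler_step(scheduler, metric);
    # the pre-2.0 (scheduler, optimizer_idx, metric) signature no longer matches.
    def lr_scheduler_step(self, scheduler, metric):
        if isinstance(scheduler, lr_scheduler.OneCycleLR):
            scheduler.step()        # OneCycleLR advances every batch
        elif metric is None:
            scheduler.step()
        else:
            scheduler.step(metric)  # e.g. ReduceLROnPlateau with a monitor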
