Warning messages if optimizer configuration does not look correct while finetuning

Signed-off-by: Paarth Neekhara <[email protected]>
paarthneekhara committed Sep 17, 2021
1 parent 0d431e3 commit dc0d0fd
Showing 1 changed file with 5 additions and 0 deletions.
examples/tts/fastpitch2_finetune.py: 5 additions & 0 deletions
@@ -18,10 +18,15 @@
 from nemo.collections.tts.models import FastPitchModel
 from nemo.core.config import hydra_runner
 from nemo.utils.exp_manager import exp_manager
+from nemo.utils import logging


 @hydra_runner(config_path="conf", config_name="fastpitch_align_44100")
 def main(cfg):
+    if hasattr(cfg.model.optim, 'sched'):
+        logging.warning("You are using an optimizer scheduler while finetuning. Are you sure this is intended?")
+    if (cfg.model.optim.lr > 1e-3 or cfg.model.optim.lr < 1e-5):
+        logging.warning("The recommended learning rate for finetuning is 2e-4")
     trainer = pl.Trainer(**cfg.trainer)
     exp_manager(trainer, cfg.get("exp_manager", None))
     model = FastPitchModel(cfg=cfg.model, trainer=trainer)
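For context, here is a minimal sketch of what the new checks react to. The config below is a hypothetical stand-in built with omegaconf (which Hydra and NeMo configs are based on), assuming only the two fields the script actually reads, model.optim.lr and model.optim.sched; it is not the real FastPitch finetuning config.

    # Hypothetical config exercising the warning checks added in this commit.
    from omegaconf import OmegaConf

    cfg = OmegaConf.create({
        "model": {
            "optim": {
                "name": "adam",
                "lr": 2e-4,  # inside [1e-5, 1e-3], so the lr warning stays quiet
                # no 'sched' block, so the scheduler warning stays quiet
            }
        }
    })
    # Hydra composes configs in struct mode, where reading a missing key raises
    # an AttributeError; that is what makes hasattr(cfg.model.optim, 'sched')
    # a meaningful presence test. Mimic that here.
    OmegaConf.set_struct(cfg, True)

    assert not hasattr(cfg.model.optim, "sched")
    assert 1e-5 <= cfg.model.optim.lr <= 1e-3

When running the script itself, the learning rate can be brought into the recommended range with a standard Hydra command-line override, e.g. python examples/tts/fastpitch2_finetune.py model.optim.lr=2e-4.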
