Implement 2p version of learning rate schedule.

Daniel Povey 2022-04-10 13:50:31 +08:00
parent da50525ca5
commit 82d58629ea

@@ -791,7 +791,7 @@ def run(rank, world_size, args):
     scheduler = torch.optim.lr_scheduler.LambdaLR(
         optimizer,
         lambda step: (((step**2 + params.lr_steps**2) / params.lr_steps**2) ** -0.25 *
-                      (((epoch**2 + params.lr_epochs**2) / params.lr_epochs**2) ** -0.25))
+                      (((epoch**2 + params.lr_epochs**2) / params.lr_epochs**2) ** -0.25)))
     if checkpoints and "optimizer" in checkpoints:
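Below is a minimal, self-contained sketch of the two-parameter schedule shown in the diff. The factor applied to the base learning rate is ((step**2 + lr_steps**2) / lr_steps**2) ** -0.25 * ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25: roughly 1.0 while step is below lr_steps and epoch is below lr_epochs, then decaying like (step * epoch) ** -0.5. The model, base lr, and the lr_steps / lr_epochs values here are placeholders, not the ones from the actual recipe.

import torch

lr_steps = 5000.0   # placeholder for params.lr_steps (step-decay constant)
lr_epochs = 6.0     # placeholder for params.lr_epochs (epoch-decay constant)
epoch = 0           # rebound by the outer epoch loop; the lambda reads it at call time

model = torch.nn.Linear(8, 8)                              # stand-in model
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)  # placeholder base lr

scheduler = torch.optim.lr_scheduler.LambdaLR(
    optimizer,
    lambda step: (
        ((step**2 + lr_steps**2) / lr_steps**2) ** -0.25
        * ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    ),
)

for epoch in range(3):          # rebinding `epoch` changes the epoch-decay factor
    for _ in range(10):         # pretend batches; the scheduler is stepped per batch
        optimizer.step()
        scheduler.step()        # advances the step counter used as `step` in the lambda
    print(epoch, scheduler.get_last_lr())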