Increase LR warmup from 500 to 1000 batches

This commit is contained in:
Daniel Povey 2023-02-11 18:15:59 +08:00
parent db543866d8
commit b0c87a93d2

View File

@@ -1119,7 +1119,8 @@ def run(rank, world_size, args):
         clipping_scale=2.0,
     )
-    scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
+    scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs,
+                     warmup_batches=1000.0)
     if checkpoints and "optimizer" in checkpoints:
         logging.info("Loading optimizer state dict")