Speed up learning rate schedule.

Daniel Povey 2022-05-28 11:30:45 +08:00
parent 828defb019
commit e771472a30


@@ -168,7 +168,7 @@ def get_parser():
     parser.add_argument(
         "--lr-batches",
         type=float,
-        default=5000,
+        default=3000,
         help="""Number of steps that affects how rapidly the learning rate decreases.
         We suggest not to change this.""",
     )
@@ -176,7 +176,7 @@ def get_parser():
     parser.add_argument(
         "--lr-epochs",
         type=float,
-        default=6,
+        default=4,
         help="""Number of epochs that affects how rapidly the learning rate decreases.
         """,
     )
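
For context, both parameters scale a decay factor in the recipe's learning-rate schedule: smaller values make the decay start earlier, which is what "speed up" means here. A minimal sketch of the effect, assuming an Eden-style formula like the one in icefall's optim.py (the exact exponents, the eden_lr helper, and the example initial LR of 0.003 are illustrative assumptions, not part of this commit):

def eden_lr(initial_lr, batch, epoch, lr_batches=3000.0, lr_epochs=4.0):
    # Each factor is close to 1 early in training and decays roughly like
    # batch**-0.5 (resp. epoch**-0.5) once batch >> lr_batches or epoch >> lr_epochs.
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return initial_lr * batch_factor * epoch_factor

# The new defaults (3000 / 4) give a lower LR at the same point in training
# than the old ones (5000 / 6), i.e. the schedule decays faster.
print(eden_lr(0.003, 10000, 4.0, lr_batches=5000.0, lr_epochs=6.0))  # old defaults
print(eden_lr(0.003, 10000, 4.0))                                    # new defaults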
@@ -881,10 +881,6 @@ def run(rank, world_size, args):
         logging.info("Loading optimizer state dict")
         optimizer.load_state_dict(checkpoints["optimizer"])
-        for p in optimizer.param_groups:
-            p['max_eff_lr'] = params.initial_lr # TEMP!!
     if (
         checkpoints
         and "scheduler" in checkpoints