Setting lr_update_period=(200,4k) in train.py

Daniel Povey 2022-07-25 04:38:12 +08:00
parent 06718052ec
commit b0f0c6c4ab


@@ -928,7 +928,8 @@ def run(rank, world_size, args):
     optimizer = PrAdam(model.parameters(),
                        lr=params.initial_lr,
-                       max_block_size=512)
+                       max_block_size=512,
+                       lr_update_period=(200, 4000))
     scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
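For context, the optimizer/scheduler setup in run() after this change looks roughly like the sketch below. The import path, the wrapper function name, and the `model`/`params` objects are assumptions for illustration; only the lr_update_period=(200, 4000) argument is what this commit adds.

# Minimal sketch of the construction after this commit (not verbatim from train.py).
# Assumes PrAdam and Eden come from the recipe's local optim module;
# `model` and `params` are supplied by the surrounding run() function.
from optim import Eden, PrAdam

def build_optimizer_and_scheduler(model, params):
    optimizer = PrAdam(model.parameters(),
                       lr=params.initial_lr,
                       max_block_size=512,
                       lr_update_period=(200, 4000))  # argument added by this commit
    scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
    return optimizer, scheduler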