From b0c87a93d23ef3c6b57570a0cc907b69b1aa01b2 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Sat, 11 Feb 2023 18:15:59 +0800
Subject: [PATCH] Increase warmup of LR from 500 to 1000 batches

---
 egs/librispeech/ASR/pruned_transducer_stateless7/train.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
index 52f25ae15..12ecb0521 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
@@ -1119,7 +1119,8 @@ def run(rank, world_size, args):
         clipping_scale=2.0,
     )
 
-    scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
+    scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs,
+                     warmup_batches=1000.0)
 
     if checkpoints and "optimizer" in checkpoints:
         logging.info("Loading optimizer state dict")
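
For context, below is a minimal standalone sketch of how a longer warmup affects the Eden schedule. It assumes the warmup multiplier ramps linearly from roughly 0.5 to 1.0 over the first warmup_batches batches and that the decay factor is driven by lr_batches and lr_epochs; the exact formula lives in icefall's optim.py and may differ, and the default values used here are illustrative only.

# Rough sketch of the assumed Eden LR factor (not copied from optim.py):
# the scheduled LR is base_lr times a decay factor (from lr_batches and
# lr_epochs) times a warmup multiplier that rises from ~0.5 to 1.0 over
# the first `warmup_batches` batches.

def eden_lr_factor(
    batch: int,
    epoch: float,
    lr_batches: float = 5000.0,   # assumed recipe default, for illustration
    lr_epochs: float = 3.5,       # assumed recipe default, for illustration
    warmup_batches: float = 1000.0,
) -> float:
    decay = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25 * (
        (epoch**2 + lr_epochs**2) / lr_epochs**2
    ) ** -0.25
    warmup = 1.0 if batch >= warmup_batches else 0.5 + 0.5 * batch / warmup_batches
    return decay * warmup

# With warmup_batches=1000 instead of 500, the multiplier reaches 1.0 only
# after 1000 batches, so the effective LR ramps up more gradually early on.
if __name__ == "__main__":
    for b in (0, 250, 500, 1000, 2000):
        print(b, round(eden_lr_factor(b, epoch=0.0), 4))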