From 11886dc4f63df30fd9c35302a840594dcca1c8a9 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Sat, 22 Oct 2022 18:22:26 +0800
Subject: [PATCH] Change base lr to 0.1, also rename from initial lr in train.py

---
 .../ASR/pruned_transducer_stateless7/train.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
index e02bc9182..51e5317b4 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
@@ -230,11 +230,10 @@ def get_parser():
     )
 
     parser.add_argument(
-        "--initial-lr",
+        "--base-lr",
         type=float,
-        default=0.075,
-        help="The initial learning rate. This value should not need "
-        "to be changed.",
+        default=0.1,
+        help="The base learning rate."
     )
 
     parser.add_argument(
@@ -977,7 +976,7 @@ def run(rank, world_size, args):
         find_unused_parameters=True)
 
     optimizer = ScaledAdam(model.parameters(),
-        lr=params.initial_lr,
+        lr=params.base_lr,
         clipping_scale=2.0)
 
     scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
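
Note (not part of the patch): a minimal sketch of how the renamed --base-lr
option is assumed to flow into ScaledAdam and Eden after this change. The
`from optim import ...` path, the toy Linear model, and the lr_batches /
lr_epochs values below are illustrative placeholders, not values taken from
the patch.

    import argparse

    import torch

    from optim import Eden, ScaledAdam  # recipe-local optim.py (assumed import path)

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--base-lr",
        type=float,
        default=0.1,
        help="The base learning rate.",
    )
    args = parser.parse_args([])  # take the defaults for this sketch

    model = torch.nn.Linear(10, 10)  # stand-in for the real transducer model
    # Same call shape as in the patch: the base lr feeds the optimizer directly.
    optimizer = ScaledAdam(model.parameters(), lr=args.base_lr, clipping_scale=2.0)
    # Eden then scales the base lr over batches/epochs (illustrative values).
    scheduler = Eden(optimizer, 5000, 6)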