diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train.py.swp index 98a5c1d0d..3071e73d8 100644 Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train.py.swp differ diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py index 9baf413f7..97d473e42 100755 --- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py +++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py @@ -1144,13 +1144,12 @@ def run(rank, world_size, args, wb=None): if wb is None: optimizer_enc = ScaledAdam( - dec_param, - lr=params.peak_enc_lr, - clipping_scale=5.0, - parameters_names=dec_names, + enc_param, + lr=params.peak_enc_lr, + clipping_scale=2.0, + parameters_names=parameters_names, ) - optimizer_dec = Eve(dec_param, lr=params.peak_dec_lr) else: logging.info('start wandb sweep optimization...') logging.info(wb.config.peak_enc_lr)