diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
index 65a5521d9..1ca772099 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
@@ -1206,6 +1206,10 @@ def run(rank, world_size, args, wb=None):
     parameters_names.append(
         [name_param_pair[0] for name_param_pair in model.named_parameters()]
     )
+
+    logging.info(f"len names = {len(parameters_names)}")
+    logging.info(f"len params = {len(list(model.parameters()))}")
+
     optimizer = ScaledAdam(
         model.parameters(),
         lr=params.base_lr,
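For context, the two added log lines compare the length of `parameters_names` (a list containing one list of names per parameter group) against the number of parameter tensors in the model, just before `ScaledAdam` is constructed. Below is a minimal sketch of the same check pulled into a helper; the helper name `log_param_counts` is hypothetical and not part of the diff, and it assumes `model` is the `torch.nn.Module` built earlier in `run()` and that `parameters_names` uses the list-of-lists layout shown above.

```python
import logging
import torch


def log_param_counts(model: torch.nn.Module, parameters_names: list) -> None:
    """Log how many name groups and parameter tensors exist, so any mismatch
    between parameters_names and model.parameters() is visible before the
    optimizer is constructed. (Illustrative helper, not part of the diff.)"""
    num_name_groups = len(parameters_names)
    num_names = sum(len(group) for group in parameters_names)
    num_params = len(list(model.parameters()))
    logging.info(f"name groups = {num_name_groups}")
    logging.info(f"total names = {num_names}, total params = {num_params}")
    if num_names != num_params:
        logging.warning("parameters_names does not cover every model parameter")
```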