From cc260711b881a2571350268a2f1a1147c9d1fb67 Mon Sep 17 00:00:00 2001 From: Daniel Povey Date: Wed, 9 Nov 2022 13:26:18 +0800 Subject: [PATCH] Make pos_dim the same as it was in scaled_adam_exp229, although this was probably too high. --- egs/librispeech/ASR/pruned_transducer_stateless7/train.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py index ae7caa860..148e3a8cf 100755 --- a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py +++ b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py @@ -162,7 +162,7 @@ def add_model_arguments(parser: argparse.ArgumentParser): parser.add_argument( "--pos-dim", type=int, - default="192", + default="384", help="Positional-encoding embedding dimension" ) @@ -1009,6 +1009,9 @@ def run(rank, world_size, args): num_param = sum([p.numel() for p in model.parameters()]) logging.info(f"Number of model parameters: {num_param}") + for name, module in model.named_modules(): + num_param = sum([p.numel() for p in module.parameters()]) + logging.info(f"Number of model parameters for {name}: {num_param}") assert params.save_every_n >= params.average_period model_avg: Optional[nn.Module] = None