From b441dffa836302de922ef3a725e30350206e0f4d Mon Sep 17 00:00:00 2001
From: Quandwang
Date: Wed, 20 Jul 2022 17:46:40 +0800
Subject: [PATCH] fix typos and modify the extra_repr of ScaledEmbedding

---
 egs/librispeech/ASR/conformer_ctc2/train.py                 | 2 +-
 egs/librispeech/ASR/pruned_transducer_stateless2/scaling.py | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/egs/librispeech/ASR/conformer_ctc2/train.py b/egs/librispeech/ASR/conformer_ctc2/train.py
index 30764bb5f..d7baa229f 100755
--- a/egs/librispeech/ASR/conformer_ctc2/train.py
+++ b/egs/librispeech/ASR/conformer_ctc2/train.py
@@ -990,7 +990,7 @@ def run(rank, world_size, args):
         train_cuts, sampler_state_dict=sampler_state_dict
     )
 
-    valid_cuts = librispeech.dev_cuts()
+    valid_cuts = librispeech.dev_clean_cuts()
     valid_cuts += librispeech.dev_other_cuts()
     valid_dl = librispeech.valid_dataloaders(valid_cuts)
 
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless2/scaling.py b/egs/librispeech/ASR/pruned_transducer_stateless2/scaling.py
index c190be626..26a8cca44 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless2/scaling.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless2/scaling.py
@@ -643,7 +643,8 @@ class ScaledEmbedding(nn.Module):
         )
 
     def extra_repr(self) -> str:
-        s = "{num_embeddings}, {embedding_dim}, scale={scale}"
+        # s = "{num_embeddings}, {embedding_dim}, scale={scale}"
+        s = "{num_embeddings}, {embedding_dim}"
         if self.padding_idx is not None:
             s += ", padding_idx={padding_idx}"
         if self.scale_grad_by_freq is not False: