From 61f62837fa62d30488c84d57f5c442fc5bb84081 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Wed, 5 Oct 2022 15:34:39 +0800
Subject: [PATCH] Fix bug RE self.training

---
 egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py
index 52f691ac7..11b18af35 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py
@@ -348,7 +348,7 @@ class ConformerEncoder(nn.Module):
         def get_random_mask():
             # 1.0 means don't drop the layer, 0.0 means drop the layer
             mask = torch.ones(num_layers, device='cpu')
-            if self.training:
+            if not self.training:
                 return mask
             r = rng.random()
             if r < 0.1:
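
Note on the fix: the original condition was inverted, so the all-ones mask (no layers
dropped) was returned during training and random layer dropping only happened at
evaluation time. The sketch below shows the intended behaviour after the fix as a
standalone function; the name get_layer_dropout_mask, its signature, and the dropping
probabilities under r < 0.1 are illustrative assumptions, since that branch is not
visible in this hunk.

import random

import torch


def get_layer_dropout_mask(num_layers: int, training: bool,
                           rng: random.Random) -> torch.Tensor:
    # 1.0 means keep the layer, 0.0 means drop the layer.
    mask = torch.ones(num_layers, device="cpu")
    if not training:
        # Eval/inference: always keep every layer.  With the pre-fix
        # condition ("if self.training"), this early return fired during
        # training instead, so layer dropout was never applied when it
        # should have been.
        return mask
    r = rng.random()
    if r < 0.1:
        # Illustrative assumption: with probability 0.1, drop each layer
        # independently with probability 0.2.  The real scheme in
        # conformer.py is not shown in this hunk.
        mask = (torch.rand(num_layers) > 0.2).float()
    return mask


# Example: in training mode layers may occasionally be dropped,
# while in eval mode the mask is always all ones.
print(get_layer_dropout_mask(12, training=True, rng=random.Random(0)))
print(get_layer_dropout_mask(12, training=False, rng=random.Random(0)))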