From 7d8e460a53fb118a10b487a877bc25d5bbdf146a Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Thu, 13 Oct 2022 15:09:50 +0800
Subject: [PATCH] Revert dropout on attention scores to 0.0.

---
 egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py
index c00f04e31..177aa3c3b 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/conformer.py
@@ -263,7 +263,7 @@ class ConformerEncoderLayer(nn.Module):
         self.d_model = d_model

         self.self_attn = RelPositionMultiheadAttention(
-            d_model, nhead, dropout=dropout,
+            d_model, nhead, dropout=0.0,
         )
         self.feed_forward1 = FeedforwardModule(d_model,