Revert dropout on attention scores to 0.0.

Daniel Povey 2022-10-13 15:09:50 +08:00
parent 2a50def7c6
commit 7d8e460a53


@@ -263,7 +263,7 @@ class ConformerEncoderLayer(nn.Module):
         self.d_model = d_model
         self.self_attn = RelPositionMultiheadAttention(
-            d_model, nhead, dropout=dropout,
+            d_model, nhead, dropout=0.0,
         )
         self.feed_forward1 = FeedforwardModule(d_model,
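
For context, the dropout reverted here is the one applied to the attention scores (per the commit message), typically the post-softmax attention weights; with p=0.0 that dropout becomes a no-op and the weights are used unchanged even in training mode. The sketch below is a minimal, generic scaled dot-product attention showing where such a dropout would sit. It is an illustration only, not the RelPositionMultiheadAttention implementation; the function name and tensor shapes are placeholders.

import torch
import torch.nn.functional as F

def scaled_dot_product_attention(q, k, v, dropout_p=0.0, training=True):
    # q, k, v: (batch, heads, time, head_dim)
    scale = q.size(-1) ** -0.5
    scores = torch.matmul(q, k.transpose(-2, -1)) * scale  # (batch, heads, time, time)
    attn = F.softmax(scores, dim=-1)
    # Dropout on the attention scores; with p=0.0 this leaves `attn` unchanged.
    attn = F.dropout(attn, p=dropout_p, training=training)
    return torch.matmul(attn, v)

# With dropout_p=0.0 the output is deterministic even in training mode.
q = k = v = torch.randn(2, 4, 10, 16)
out_a = scaled_dot_product_attention(q, k, v, dropout_p=0.0)
out_b = scaled_dot_product_attention(q, k, v, dropout_p=0.0)
assert torch.allclose(out_a, out_b)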