zipformer.py: fix inverted const_attention_rate condition — apply constant attention weights during training (`self.training`), not during inference (`not self.training`)

This commit is contained in:
yaozengwei 2024-03-21 15:54:13 +08:00
parent bddc3fca7a
commit 40888f63e7

View File

@ -788,7 +788,7 @@ class Zipformer2EncoderLayer(nn.Module):
selected_attn_weights = attn_weights[0:1]
if torch.jit.is_scripting() or torch.jit.is_tracing():
pass
elif not self.training and random.random() < float(self.const_attention_rate):
elif self.training and random.random() < float(self.const_attention_rate):
# Make attention weights constant. The intention is to
# encourage these modules to do something similar to an
# averaging-over-time operation.