Reduce attention_squeeze dim from 512 to 128.

Daniel Povey 2022-12-11 18:51:01 +08:00
parent 634f1a4b82
commit 05c7cb5c83


@@ -446,7 +446,7 @@ class ZipformerEncoderLayer(nn.Module):
             cnn_module_kernel)
-        self.attention_squeeze = AttentionSqueeze(embed_dim, 512)
+        self.attention_squeeze = AttentionSqueeze(embed_dim, 128)
         self.norm_final = BasicNorm(embed_dim)
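
For context (not part of the diff): the second constructor argument is the module's internal bottleneck width, so this change narrows that bottleneck from 512 to 128. Below is a minimal sketch, assuming only that AttentionSqueeze(embed_dim, bottleneck_dim) projects down to bottleneck_dim and back up to embed_dim; the real module in zipformer.py differs in its internals, and the class name and shapes here are illustrative assumptions. The point is that the parameter count of the down/up projections scales linearly with the bottleneck width.

import torch
import torch.nn as nn

class BottleneckSketch(nn.Module):
    # Illustrative stand-in, not the real AttentionSqueeze: it only shows how
    # the second constructor argument acts as an internal bottleneck width.
    def __init__(self, embed_dim: int, bottleneck_dim: int):
        super().__init__()
        self.down = nn.Linear(embed_dim, bottleneck_dim)  # embed_dim -> bottleneck_dim
        self.up = nn.Linear(bottleneck_dim, embed_dim)    # bottleneck_dim -> embed_dim

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Input and output are (seq_len, batch, embed_dim); the intermediate
        # activation is only bottleneck_dim wide.
        return self.up(torch.relu(self.down(x)))

# With an illustrative embed_dim of 384, the two projections hold roughly
# 394k parameters at bottleneck_dim=512 versus roughly 99k at bottleneck_dim=128.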