From 634f1a4b82c9dd91a7a9c3e5c9f449da1da27398 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Sun, 11 Dec 2022 17:20:52 +0800
Subject: [PATCH] Hardcode AttentionSqueeze dim at 512.

---
 egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
index 826678931..f7142bab3 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
@@ -446,7 +446,7 @@ class ZipformerEncoderLayer(nn.Module):
                                              cnn_module_kernel)
 
-        self.attention_squeeze = AttentionSqueeze(embed_dim, embed_dim)
+        self.attention_squeeze = AttentionSqueeze(embed_dim, 512)
 
         self.norm_final = BasicNorm(embed_dim)
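
A minimal call-site sketch of the effect of this change, assuming the second
AttentionSqueeze constructor argument is the module's internal squeeze width
(that interpretation is an assumption; the patch itself only shows the call):

    # Before: the squeeze width tracked the encoder layer's embed_dim.
    self.attention_squeeze = AttentionSqueeze(embed_dim, embed_dim)

    # After: the squeeze width is hardcoded at 512, so it no longer scales
    # with embed_dim and is the same for every ZipformerEncoderLayer.
    self.attention_squeeze = AttentionSqueeze(embed_dim, 512)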