From ed65330261d06a396dad336f55d3cd14042cd1ad Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Mon, 16 Jan 2023 13:18:29 +0800
Subject: [PATCH] RemoveAttentionSqueeze

---
 .../ASR/pruned_transducer_stateless7/zipformer.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
index 1595d0544..0fcb4bbe3 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
@@ -497,7 +497,7 @@ class ZipformerEncoderLayer(nn.Module):
             cnn_module_kernel)
 
-        self.attention_squeeze = AttentionSqueeze(embed_dim, embed_dim // 2)
+        #self.attention_squeeze = AttentionSqueeze(embed_dim, embed_dim // 2)
 
         self.norm = BasicNorm(embed_dim)
 
@@ -642,10 +642,10 @@ class ZipformerEncoderLayer(nn.Module):
 
         src = src + self.feed_forward1(src)
 
-        # pooling module
-        if torch.jit.is_scripting() or use_self_attn:
-            src = src + self.balancer_as(
-                self.attention_squeeze(src, selected_attn_weights[1:2]))
+        ## pooling module
+        #if torch.jit.is_scripting() or use_self_attn:
+        #    src = src + self.balancer_as(
+        #        self.attention_squeeze(src, selected_attn_weights[1:2]))
 
         if torch.jit.is_scripting() or use_self_attn:
             src = src + self.self_attn(
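
Note (not part of the patch itself): the change comments out both the construction of AttentionSqueeze in __init__ and the residual branch that applied it in forward(), so the layer loses exactly one additive term. The sketch below is a minimal, self-contained illustration of what a pooling branch of this shape computes and why its removal is a pure deletion of one residual term. PoolingBranch, its bottleneck sizes, and the toy tensor shapes are illustrative assumptions, not the actual icefall AttentionSqueeze implementation (which adds balancers, activations, and whitening).

    # Minimal sketch of an attention-weighted pooling branch, assuming
    # (seq_len, batch, embed_dim) activations as in zipformer.py.
    import torch
    import torch.nn as nn


    class PoolingBranch(nn.Module):
        """Hypothetical stand-in for AttentionSqueeze: pools frames with one
        head of precomputed attention weights through a bottleneck."""

        def __init__(self, embed_dim: int, bottleneck_dim: int):
            super().__init__()
            self.in_proj = nn.Linear(embed_dim, bottleneck_dim)
            self.out_proj = nn.Linear(bottleneck_dim, embed_dim)

        def forward(self, x: torch.Tensor, attn_weights: torch.Tensor) -> torch.Tensor:
            # x: (seq_len, batch, embed_dim)
            # attn_weights: (1, batch, seq_len, seq_len), one head's weights,
            # analogous to the selected_attn_weights[1:2] slice in the patch.
            b = self.in_proj(x)               # (seq_len, batch, bottleneck)
            b = b.permute(1, 0, 2)            # (batch, seq_len, bottleneck)
            w = attn_weights.squeeze(0)       # (batch, seq_len, seq_len)
            pooled = torch.matmul(w, b)       # attention-weighted pooling over frames
            pooled = pooled.permute(1, 0, 2)  # (seq_len, batch, bottleneck)
            return self.out_proj(pooled)      # (seq_len, batch, embed_dim)


    seq_len, batch, embed_dim = 10, 2, 16
    x = torch.randn(seq_len, batch, embed_dim)
    attn = torch.softmax(torch.randn(1, batch, seq_len, seq_len), dim=-1)
    branch = PoolingBranch(embed_dim, embed_dim // 2)  # mirrors embed_dim // 2 in the patch

    # Before the patch the layer did, schematically: src = src + branch(src, attn).
    # After the patch that residual term is simply omitted.
    out_before = x + branch(x, attn)
    out_after = x
    print(out_before.shape, out_after.shape)  # torch.Size([10, 2, 16]) both times

Because the branch only ever contributed additively to src, disabling it (as the patch does by commenting out the call site and the module construction together) leaves every other code path in the layer unchanged.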