From 22204450dbb87f2a54213c856057f63c35d3efea Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Tue, 13 Dec 2022 18:51:22 +0800
Subject: [PATCH] Make min_abs of AttentionSqueeze smaller, the same as
 nonlin_attention_module

---
 egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
index 7c7b39fe9..360993621 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
@@ -1406,7 +1406,7 @@ class AttentionSqueeze(nn.Module):
         self.out_balancer = ActivationBalancer(
             embed_dim, channel_dim=-1,
             min_positive=0.3, max_positive=0.7,
-            min_abs=ScheduledFloat((0.0, 0.002), (8000.0, 0.02), (20000.0, 0.01)),
+            min_abs=ScheduledFloat((0.0, 0.001), (8000.0, 0.01), (20000.0, 0.005)),
         )
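
The patch halves every breakpoint of the min_abs schedule passed to ActivationBalancer. A minimal sketch of the intended behavior follows, assuming (as the (x, y) pairs suggest) that ScheduledFloat interpolates piecewise-linearly over the training batch count and clamps at the endpoints; the function name scheduled_value below is purely illustrative, not part of the icefall code.

# Illustrative sketch only, not the icefall ScheduledFloat implementation:
# piecewise-linear interpolation of a scheduled value over (batch_count, value)
# breakpoints, clamped outside the first and last breakpoint.

def scheduled_value(batch_count: float, *points: tuple) -> float:
    """Interpolate linearly between sorted (batch_count, value) breakpoints."""
    points = sorted(points)
    if batch_count <= points[0][0]:
        return points[0][1]
    if batch_count >= points[-1][0]:
        return points[-1][1]
    for (x0, y0), (x1, y1) in zip(points, points[1:]):
        if x0 <= batch_count <= x1:
            frac = (batch_count - x0) / (x1 - x0)
            return y0 + frac * (y1 - y0)

# New schedule from this patch: 0.001 at batch 0, rising to 0.01 by batch 8000,
# then decaying to 0.005 by batch 20000 and staying there.
print(scheduled_value(4000.0, (0.0, 0.001), (8000.0, 0.01), (20000.0, 0.005)))   # 0.0055
print(scheduled_value(30000.0, (0.0, 0.001), (8000.0, 0.01), (20000.0, 0.005)))  # 0.005

Under this reading, the change simply scales the whole min_abs trajectory down by a factor of two, matching the smaller constraint already used for the nonlin_attention module.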