From c6d859dd05fa0aac2603ba6b6785e2cd786fd024 Mon Sep 17 00:00:00 2001 From: Daniel Povey Date: Mon, 28 Nov 2022 11:35:00 +0800 Subject: [PATCH] Increase min_abs of balancer in NonlinAttentionModule from 1.5 to 2.0. --- .../ASR/pruned_transducer_stateless7/zipformer.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py index cf78ab105..ef75b892a 100644 --- a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py +++ b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py @@ -1433,12 +1433,15 @@ class NonlinAttentionModule(nn.Module): self.in_proj = nn.Linear(channels, channels, bias=True) - # balancer that goes before the sigmoid. + # balancer that goes before the sigmoid. We use quite a large min_abs value, at 2.0, + # because we noticed that well-trained instances of this module have abs-values before the sigmoid + # starting from about 3, and poorly-trained instances of the module have smaller abs values + # before the sigmoid. self.balancer = ActivationBalancer( channels // 2, channel_dim=-1, min_positive=ScheduledFloat((0.0, 0.1), (8000.0, 0.05)), max_positive=1.0, - min_abs=1.5, + min_abs=2.0, max_abs=ScheduledFloat((0.0, 5.0), (8000.0, 10.0), default=1.0), ) self.sigmoid = nn.Sigmoid()