From 958d9b929d4b3851137280b772c1628ce5c6a40c Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Fri, 9 Dec 2022 21:00:24 +0800
Subject: [PATCH] Double limit of penalize_abs_values_gt in AttentionDownsample
 from 10 to 20.

---
 egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
index e22db4d34..c2b3e81b2 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
@@ -829,7 +829,7 @@ class AttentionDownsample(torch.nn.Module):
         scores = (src * self.query).sum(dim=-1, keepdim=True)
 
         scores = penalize_abs_values_gt(scores,
-                                        limit=10.0,
+                                        limit=20.0,
                                         penalty=1.0e-04)
 
         weights = scores.softmax(dim=1)
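
Context for the change: penalize_abs_values_gt (provided alongside the Zipformer code in scaling.py) returns the scores unchanged in the forward pass but, during backprop, adds a small gradient penalty to elements whose absolute value exceeds `limit`, discouraging implausibly large values from reaching the softmax. This patch relaxes that limit from 10.0 to 20.0 for the AttentionDownsample scores. The sketch below is a minimal illustration of that idea under stated assumptions, not icefall's actual implementation; the class name PenalizeAbsValuesGt, the helper name penalize_abs_values_gt_sketch, and the exact form of the extra gradient are assumptions.

    import torch
    from torch import Tensor


    class PenalizeAbsValuesGt(torch.autograd.Function):
        """Illustrative sketch (assumed, not icefall's code): identity in the
        forward pass; in the backward pass, adds the gradient of
        penalty * relu(|x| - limit), nudging over-limit elements back toward
        the allowed range."""

        @staticmethod
        def forward(ctx, x: Tensor, limit: float, penalty: float) -> Tensor:
            ctx.save_for_backward(x)
            ctx.limit = limit
            ctx.penalty = penalty
            # view_as avoids returning the input tensor object itself.
            return x.view_as(x)

        @staticmethod
        def backward(ctx, grad_output: Tensor):
            (x,) = ctx.saved_tensors
            # d/dx [penalty * relu(|x| - limit)] = penalty * sign(x) where |x| > limit.
            over_limit = (x.abs() > ctx.limit).to(x.dtype)
            extra_grad = ctx.penalty * x.sign() * over_limit
            return grad_output + extra_grad, None, None


    def penalize_abs_values_gt_sketch(x: Tensor, limit: float = 20.0,
                                      penalty: float = 1.0e-04) -> Tensor:
        return PenalizeAbsValuesGt.apply(x, limit, penalty)


    if __name__ == "__main__":
        scores = torch.tensor([5.0, 25.0, -30.0], requires_grad=True)
        out = penalize_abs_values_gt_sketch(scores, limit=20.0, penalty=1.0e-04)
        out.sum().backward()
        # Forward values are unchanged; gradients pick up +/- 1e-4 only for the
        # two elements whose absolute value exceeds the limit of 20.
        print(out)          # tensor([  5.,  25., -30.], ...)
        print(scores.grad)  # tensor([1.0000, 1.0001, 0.9999])

With the limit raised to 20.0, scores in (-20, 20) pass through with no extra gradient, so the downsampling attention weights are allowed a wider dynamic range before the penalty engages.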