From a067fe8026500ce75dffdb6dc32cca74e295e335 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Fri, 28 Oct 2022 12:50:14 +0800
Subject: [PATCH] Fix clamping of epsilon

---
 egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py b/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
index 742c314fa..f43fae528 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
@@ -382,8 +382,7 @@ class BasicNorm(torch.nn.Module):
             # region if it happens to exit it.
             eps = eps.clamp(min=self.eps_min, max=self.eps_max)
         scales = (
-            torch.mean(x ** 2, dim=self.channel_dim, keepdim=True)
-            + self.eps.exp()
+            torch.mean(x ** 2, dim=self.channel_dim, keepdim=True) + eps.exp()
         ) ** -0.5
         return x * scales
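
For illustration only: the patch makes the clamped local variable eps actually take
effect; before it, eps was clamped but the unclamped parameter self.eps was still used
in self.eps.exp(). Below is a minimal, self-contained sketch of a BasicNorm-style
module with the corrected forward pass. It is not the icefall implementation: the
class name, constructor defaults, log-space storage of eps, and the unconditional
clamp are assumptions read off the diff; only the eps_min/eps_max clamp and the
scales expression come from the patch itself.

    import torch


    class BasicNormSketch(torch.nn.Module):
        """Minimal sketch of a BasicNorm-style layer illustrating the fix."""

        def __init__(
            self,
            num_channels: int,
            channel_dim: int = -1,
            eps: float = 0.25,
            eps_min: float = -3.0,
            eps_max: float = 3.0,
        ):
            super().__init__()
            self.num_channels = num_channels
            self.channel_dim = channel_dim
            # eps is stored in log space and learned, as implied by eps.exp()
            # in the diff; the initial value here is an assumption.
            self.eps = torch.nn.Parameter(torch.tensor(eps).log())
            self.eps_min = eps_min
            self.eps_max = eps_max

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            assert x.shape[self.channel_dim] == self.num_channels
            # Clamp the learned log-epsilon into the allowed range. Before the
            # fix, this clamped value was computed but self.eps.exp() (the
            # unclamped parameter) was still used, so the clamp had no effect.
            eps = self.eps.clamp(min=self.eps_min, max=self.eps_max)
            scales = (
                torch.mean(x ** 2, dim=self.channel_dim, keepdim=True) + eps.exp()
            ) ** -0.5
            return x * scales

The one-line change in the diff is the last step: dividing by the RMS-plus-epsilon
term now uses the clamped eps, so a runaway learned epsilon can no longer bypass
the eps_min/eps_max bounds during normalization.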