From d3876e32c488058341e093fd5ec3bb5cc0613762 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Sun, 23 Oct 2022 21:13:23 +0800
Subject: [PATCH] Make it use float16 if in amp but use clamp to avoid
 wrapping error

---
 egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py b/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
index d4c288545..7e1b9a822 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
@@ -922,8 +922,7 @@ class DoubleSwishFunction(torch.autograd.Function):
         if requires_grad:
             # discretize s. This should be expectation-preserving if we just divide the
             # result by 255.
-            s = s.to(torch.float)
-            s = ((s * 254.99) + torch.rand_like(s)).to(torch.uint8)
+            s = ((s * 255) + torch.rand_like(s)).clamp(max=255).to(torch.uint8)
             ctx.save_for_backward(s, y)
         return y
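
A minimal sketch (not part of the patch) of the failure mode the clamp guards against: under AMP the activations stay in float16, so (s * 255) + rand can round up to exactly 256.0 when s is close to 1, and the subsequent uint8 cast wraps around instead of saturating. The variable s mirrors the patched code; r, unclamped, wrapped, and clamped are illustrative names, and the fixed 0.99 stands in for torch.rand_like(s) only to make the wrap reproducible.

    import torch

    # s squashed into [0, 1], held in float16 as it would be under AMP.
    s = torch.tensor([0.0, 0.5, 1.0], dtype=torch.float16)
    r = torch.full_like(s, 0.99)  # stand-in for torch.rand_like(s)

    unclamped = (s * 255) + r  # the s == 1.0 entry rounds up to 256.0 in float16
    wrapped = unclamped.to(torch.uint8)  # 256.0 typically wraps to 0 instead of saturating at 255

    # The patched line caps the value before the uint8 cast, keeping the stochastic
    # rounding expectation-preserving (dividing by 255 later recovers s on average).
    clamped = unclamped.clamp(max=255).to(torch.uint8)

    print(unclamped, wrapped, clamped)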