diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py b/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
index f819bdf7c..a4e78f25c 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/scaling.py
@@ -244,7 +244,6 @@ class SoftmaxFunction(torch.autograd.Function):
 
 
 def softmax(x: Tensor, dim: int):
-    logging.info(f"torch.is_autocast_enabled()={torch.is_autocast_enabled()}, x dtype={x.dtype}")
     return SoftmaxFunction.apply(x, dim)
 
 