From 069125686eb2d574fa898d7258611d9a8fef5424 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Sat, 22 Oct 2022 15:08:07 +0800
Subject: [PATCH] Fixes to logging statements.

---
 egs/librispeech/ASR/pruned_transducer_stateless7/train.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
index 258457f98..1030be528 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/train.py
@@ -845,9 +845,9 @@ def train_one_epoch(
             if cur_grad_scale < 1.0 or (cur_grad_scale < 8.0 and batch_idx % 400 == 0):
                 scaler.update(cur_grad_scale * 2.0)
             if cur_grad_scale < 0.01:
-                logging.warn("Grad scale is small: {cur_grad_scale}")
+                logging.warning(f"Grad scale is small: {cur_grad_scale}")
             if cur_grad_scale < 1.0e-05:
-                raise RuntimeError("grad_scale is too small, exiting: {cur_grad_scale}")
+                raise RuntimeError(f"grad_scale is too small, exiting: {cur_grad_scale}")

         if batch_idx % params.log_interval == 0:
             cur_lr = scheduler.get_last_lr()[0]
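
A minimal sketch of the behavior the two hunks fix (the value of cur_grad_scale
below is illustrative; in train.py it comes from the GradScaler): without the f
prefix the braces are emitted literally instead of the value, and logging.warn
is the deprecated alias of logging.warning.

    import logging

    logging.basicConfig(level=logging.DEBUG)

    cur_grad_scale = 0.005  # illustrative value only

    # Without the f prefix the placeholder is printed verbatim:
    logging.warning("Grad scale is small: {cur_grad_scale}")
    # -> WARNING:root:Grad scale is small: {cur_grad_scale}

    # With the f prefix the value is interpolated, and logging.warning
    # avoids the DeprecationWarning raised by logging.warn:
    logging.warning(f"Grad scale is small: {cur_grad_scale}")
    # -> WARNING:root:Grad scale is small: 0.005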