diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
index d3ff4f0e0..72304341d 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
@@ -1086,6 +1086,15 @@ def train_one_epoch(
                     f"grad_scale is too small, exiting: {cur_grad_scale}"
                 )
 
+            # Abort once the loss has clearly diverged; the first 4000
+            # batches are skipped so warm-up noise is not flagged.
+            if params.batch_idx_train > 4000 and loss > 300:
+                # Log a sentinel value so the failure is visible in wandb.
+                wb.log({"valid/loss": 10000})
+                raise RuntimeError(
+                    f"divergence... exiting: loss={loss}"
+                )
+
             if batch_idx % (params.log_interval*params.accum_grads) == 0:
                 if params.multi_optim:
                     cur_enc_lr = scheduler_enc.get_last_lr()[0]
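
For reference, a minimal standalone sketch of the divergence guard added above. It assumes `wb` is an initialized wandb run object and `loss` is already a plain Python float; the helper name `check_divergence` and the two named constants are illustrative, not part of the patch:

    # Sketch of the divergence guard, pulled out of the training loop for
    # clarity. Assumptions (not from the patch): `wb` behaves like a wandb
    # run, `loss` is a float, and 4000/300 mirror the hard-coded thresholds.
    WARMUP_STEPS = 4000    # skip the check while early training is noisy
    LOSS_CEILING = 300.0   # a loss above this after warm-up means divergence

    def check_divergence(batch_idx_train: int, loss: float, wb) -> None:
        if batch_idx_train > WARMUP_STEPS and loss > LOSS_CEILING:
            # Push a sentinel value first so the failure shows up in the
            # dashboard even though the run is about to be terminated.
            wb.log({"valid/loss": 10000})
            raise RuntimeError(f"divergence... exiting: loss={loss}")

Raising rather than merely logging makes the crash explicit to the job scheduler, so a diverged run does not keep burning GPU hours.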