From f3274073082d99c848d74b51fc7d43497ee7515f Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Fri, 24 Jun 2022 13:20:20 +0800
Subject: [PATCH] Avoid error if svd fails

---
 .../ASR/pruned_transducer_stateless7/optim.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py b/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py
index 13038dc0f..3fc1f7798 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py
@@ -913,13 +913,14 @@ class NeutralGradient(Optimizer):
                     P = torch.matmul(Y, Y.t())
 
                 if random.random() < 1.0: #0.025:
-
-                    # TEMP:
-                    _,s,_ = P.svd()
-                    print(f"Min,max eig of P: {s.min()},{s.max()}")
-
                     # TODO: remove this testing code.
                     assert (P - P.t()).abs().mean() < 0.01 # make sure symmetric.
+                    try:
+                        P = 0.5 * (P + P.t())
+                        _,s,_ = P.svd()
+                        print(f"Min,max eig of P: {s.min()},{s.max()}")
+                    except:
+                        pass
                     # testing... note, this is only true modulo "eps"
                     C_check = torch.matmul(torch.matmul(P, G), P)
                     C_smoothed = torch.matmul(Q.t(), Q)
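
For context, a minimal standalone sketch of the technique the patch applies: torch.svd can raise a RuntimeError ("svd ... failed to converge") on ill-conditioned input, so the matrix is symmetrized before factorizing and the SVD is treated as an optional diagnostic that is skipped on failure rather than aborting training. The function name report_eig_range and the demo tensors below are illustrative, not part of icefall, and the sketch catches RuntimeError specifically where the patch itself uses a bare except.

import torch

def report_eig_range(P: torch.Tensor) -> None:
    # Print the extreme eigenvalues of a (nearly) symmetric PSD matrix P.
    # SVD can fail to converge on ill-conditioned input; since this is a
    # debugging printout only, skip it rather than crash.
    try:
        P = 0.5 * (P + P.t())  # force exact symmetry before factorizing
        _, s, _ = P.svd()      # for symmetric PSD P, singular values == eigenvalues
        print(f"Min,max eig of P: {s.min()},{s.max()}")
    except RuntimeError:
        pass  # e.g. "svd ... failed to converge"; the diagnostic is optional

Y = torch.randn(8, 8)
report_eig_range(torch.matmul(Y, Y.t()))  # P = Y Y^T is symmetric PSD

Symmetrizing as 0.5 * (P + P.t()) removes the rounding-level asymmetry that the patch's assert bounds, which tends to make the factorization better behaved; the try/except then covers the remaining cases where convergence still fails.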