From c3da411351f88017ff1a14eebdad0a987f99b50e Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Sat, 10 Dec 2022 13:06:29 +0900
Subject: [PATCH] from local

---
 .../.train.py.swp     | Bin 65536 -> 65536 bytes
 .../train.py          |   6 +++++-
 icefall/.utils.py.swp | Bin 65536 -> 65536 bytes
 3 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train.py.swp
index b66bcc0a90483c351f049ce574c0ab88e3c14de8..742ca4b0344f293c771d4b2b832ef0486795c4d4 100644
GIT binary patch
delta 486
zcmXZY&nv@m9LMpu^|cwkwr}$5WD0ASl}$*5+J)KdAnCiyEG2UpB`MjGyUiz3YRbWc
zgSDHJKfr;S14a1{R+O~Dv$l6V>fP)0KE2avEuGeGokm@(Xc(Vfecd9WYa-$li78`p
zI91U_K5&mD7T`rYzM4huaEoIcVG}dx!FQ9$E6#C(T}-1Bt$1k^DIt#w?7>6>Y
z2^K$WDAjX-JQNO*#SWG*fiC>;!)II}g%ylJ*9uQt7Uc_gi`RjmO(k-=<{or=)Ks)X
zX|v^O;I9Sq=8!rKcd0YeuFTM6we(SDLmMidYt!8WZjZQXqvqn>YBAcQTnSBWM@Q92
K$WhFMlZAgOcv?XK

delta 435
zcmW;IKP&@r7{>9p?P;~#=`AW!QL#6TMIs^{u8Du`V3J;k8dZWts*&m?7B=xij1pq<
zujwQf0|uswM6elY{Eyi9GJM|m8J_8J9L;gG!=UNPR7~?Ds=v8KikgT7MKa2acb7bd
z$b&Ajf*eK=!&jq74NthlCh|xkhVKTEXB=S>3mAZf5SsBG5V^oC^5}(uE5FD(*8EbE
z5@8-iWRXB8e(FW8af}@}n8GAdNFt0+pU5pvu!mL5U;=TpAOyj^SL6;`$iwpXQy_%k
zfg3K=>O`tIz!JuhKr3F@?*_a0+j0z9tz6x9E8U8^BHVCAw+cQrXd7yO==J}f4{xYI
i+E9<_K6RRGQ(bfIYSivfk({Oqc8@xGCfNPf%(A

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
index 2eb62f41c..99a35df64 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
@@ -735,14 +735,17 @@ def compute_loss(
             subsampling_factor=params.subsampling_factor,
             token_ids=token_ids,
         )
-
+
+        logging.info('1')
         # Works with a BPE model
         decoding_graph = k2.ctc_graph(token_ids, modified=False, device=device)
+        logging.info('2')
         dense_fsa_vec = k2.DenseFsaVec(
             ctc_output,
             supervision_segments,
             allow_truncate=params.subsampling_factor - 1,
         )
+        logging.info('3')
 
         ctc_loss = k2.ctc_loss(
             decoding_graph=decoding_graph,
@@ -751,6 +754,7 @@ def compute_loss(
             reduction="sum",
             use_double_scores=params.use_double_scores,
         )
+        logging.info('4')
 
         assert ctc_loss.requires_grad == is_training
         loss += params.ctc_loss_scale * ctc_loss
diff --git a/icefall/.utils.py.swp b/icefall/.utils.py.swp
index d34e2c3e61feafff92e94bd282d1ebbb8a74f493..c521c2866ef7a469da17f9fb0fc8ce31a47397ab 100644
GIT binary patch
delta 33
ncmZo@U}