From ed7085a03fefd82e3e59d11112437b5c6d06bba5 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Sat, 10 Dec 2022 15:07:44 +0900
Subject: [PATCH] from local

---
 .../.model.py.swp | Bin 4096 -> 16384 bytes
 .../.model.py.swp | Bin 16384 -> 20480 bytes
 .../model.py      | 25 ++++++++++++++++++
 3 files changed, 25 insertions(+)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v/.model.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v/.model.py.swp
index 5c46a7f580e7a27e8b5494b0758de736041b6563..26cb0bd9cd9617258e2a6604b5dbb8140f2722f1 100644
GIT binary patch
literal 16384

delta 7

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.model.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.model.py.swp
index 1f5c2a1bf1189bdf4166728d1a836e0e96b30db9..2fd39173544ae1504ff9da05d6ec6661119e2250 100644
GIT binary patch
delta 1534

delta 98

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/model.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/model.py
index 9f2305393..dc250e218 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/model.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/model.py
@@ -196,3 +196,28 @@ class Transducer(nn.Module):
         )
 
         return (simple_loss, pruned_loss, ctc_output)
+
+    def decode(
+        self,
+        x: torch.Tensor,
+        x_lens: torch.Tensor,
+        y: k2.RaggedTensor,
+        sp,
+    ):
+        """Greedy-search a batch; return a list of word lists, one per utterance."""
+        from beam_search import greedy_search_batch
+
+        assert x.size(0) == x_lens.size(0) == y.dim0
+
+        encoder_out, x_lens = self.encoder(x, x_lens)
+
+        assert torch.all(x_lens > 0)
+
+        # Alternative (unused): greedy_search_batch_target_input(self, encoder_out, x_lens, decoder_out)
+        hyp_tokens = greedy_search_batch(self, encoder_out, x_lens)
+
+        # Map token IDs back to text with the SentencePiece model and split into words.
+        hyps = [hyp.split() for hyp in sp.decode(hyp_tokens)]
+
+        return hyps
+
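
A minimal sketch of how the new decode() method might be called (not taken from this recipe): the helper name run_greedy_decode, and the way the Transducer model, the SentencePiece processor, and the input features are obtained, are assumptions for illustration.

import k2
import torch
import sentencepiece as spm


def run_greedy_decode(
    model,                             # an already-built Transducer from this recipe (assumed)
    sp: spm.SentencePieceProcessor,    # the BPE model used during training (assumed)
    x: torch.Tensor,                   # input features of shape (N, T, C)
    x_lens: torch.Tensor,              # number of valid frames per utterance, shape (N,)
    texts,                             # reference transcripts, one string per utterance
):
    """Hypothetical wrapper around Transducer.decode() for a batch of utterances."""
    # decode() only uses `y` in an assert on the batch size, so any ragged
    # tensor with one row per utterance is sufficient here.
    y = k2.RaggedTensor(sp.encode(texts, out_type=int))
    model.eval()
    with torch.no_grad():
        # Returns one list of words per utterance.
        return model.decode(x, x_lens, y, sp)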