From 87b8f78d0752e873ce2d1ae95bd55193eb390784 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Sun, 8 Jan 2023 18:53:43 +0900
Subject: [PATCH] conformer: rename group_size to group_num, add gated group
 output aggregation

---
 .../conformer.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py
index 643858af0..984f23105 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py
@@ -86,7 +86,7 @@ class Conformer(EncoderInterface):
         short_chunk_size: int = 25,
         num_left_chunks: int = -1,
         causal: bool = False,
-        group_size: int = 12,
+        group_num: int = 12,
     ) -> None:
         super(Conformer, self).__init__()
@@ -125,7 +125,7 @@ class Conformer(EncoderInterface):
         self.encoder = ConformerEncoder(encoder_layer, num_encoder_layers)
         self._init_state: List[torch.Tensor] = [torch.empty(0)]
-        self.group_size = group_size
-        self.group_layer_num = int(self.encoder_layers // self.group_size)
-        self.alpha = nn.Parameter(torch.rand(self.group_size))
+        self.group_num = group_num
+        self.group_layer_num = int(self.encoder_layers // self.group_num)
+        self.alpha = nn.Parameter(torch.rand(self.group_num))
         self.sigmoid = nn.Sigmoid()
@@ -202,6 +202,12 @@
         x = x.permute(1, 0, 2)  # (T, N, C) -> (N, T, C)
         layer_output = [x.permute(1, 0, 2) for x in layer_output]
+
+        x = 0
+        for enum, alpha in enumerate(self.alpha):
+            x += self.sigmoid(alpha * layer_output[(enum + 1) * self.group_layer_num - 1])
+
+        x = self.layer_norm(x / self.group_num)
         return x, lengths
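
Reviewer note: the second and third hunks implement a learned, group-wise combination
of intermediate encoder outputs. The encoder layers are split into group_num groups;
the output of the last layer in each group is scaled by a per-group learned scalar
alpha, passed through a sigmoid, and the gated outputs are summed, averaged over the
groups, and layer-normalized. Below is a minimal, self-contained sketch of that
computation for reference. The class name GroupAggregator, the default sizes, and the
toy tensor shapes are illustrative assumptions, not part of the patch or of icefall.

import torch
import torch.nn as nn


class GroupAggregator(nn.Module):
    """Combine the last-layer output of each group of encoder layers:
    sum over groups of sigmoid(alpha_g * h_g), divided by group_num,
    followed by LayerNorm, mirroring the hunks above."""

    def __init__(self, num_layers: int = 24, group_num: int = 12, d_model: int = 256):
        super().__init__()
        assert num_layers % group_num == 0, "layers must split evenly into groups"
        self.group_num = group_num
        self.group_layer_num = num_layers // group_num  # layers per group
        self.alpha = nn.Parameter(torch.rand(group_num))  # one gate per group
        self.layer_norm = nn.LayerNorm(d_model)

    def forward(self, layer_output: list) -> torch.Tensor:
        # layer_output[i]: (N, T, C) output of encoder layer i
        x = 0
        for i, alpha in enumerate(self.alpha):
            # last layer of group i, scaled by its gate and squashed, as in the patch
            x = x + torch.sigmoid(alpha * layer_output[(i + 1) * self.group_layer_num - 1])
        return self.layer_norm(x / self.group_num)


# usage: 24 fake layer outputs of shape (batch=2, time=50, channels=256)
outs = [torch.randn(2, 50, 256) for _ in range(24)]
agg = GroupAggregator(num_layers=24, group_num=12, d_model=256)
print(agg(outs).shape)  # torch.Size([2, 50, 256])

One design point worth confirming with the author: the patch applies the sigmoid to
the scaled activations themselves, sigmoid(alpha * h), rather than computing a scalar
gate sigmoid(alpha) * h; the two behave quite differently, since the former bounds
every activation to (0, 1).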