diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer.py.swp
index a6c6a19e3..f58bd1351 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer_randomcombine.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer_randomcombine.py.swp
index 81e7151f4..5ecc7d0ab 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer_randomcombine.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/.conformer_randomcombine.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py
index 928610fd2..3a3a5e43c 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_gtrans/conformer.py
@@ -124,6 +124,11 @@ class Conformer(EncoderInterface):
         self.encoder = ConformerEncoder(encoder_layer, num_encoder_layers)
         self._init_state: List[torch.Tensor] = [torch.empty(0)]
 
+        self.group_size = 6
+        self.alpha = nn.Parameter(torch.rand(self.group_size))
+        self.sigmoid = nn.Sigmoid()
+        self.layer_norm = nn.LayerNorm(512)
+
     def forward(
         self, x: torch.Tensor, x_lens: torch.Tensor, warmup: float = 1.0
     ) -> Tuple[torch.Tensor, torch.Tensor]:
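
Note: the hunk above only registers the new submodules (group_size, alpha, sigmoid, layer_norm); how they are used inside forward is not part of this diff. The following is a minimal sketch of how a learnable gate over a group of encoder-layer outputs could be wired up with exactly these components. The class name GatedGroupCombiner, the weighted-sum formulation, and the per-layer gating are assumptions for illustration, not code taken from this patch.

import torch
import torch.nn as nn


class GatedGroupCombiner(nn.Module):
    # Hypothetical illustration only: combine the outputs of `group_size`
    # encoder layers using learnable gates, then normalize the result.
    def __init__(self, group_size: int = 6, d_model: int = 512) -> None:
        super().__init__()
        self.group_size = group_size
        self.alpha = nn.Parameter(torch.rand(group_size))
        self.sigmoid = nn.Sigmoid()
        self.layer_norm = nn.LayerNorm(d_model)

    def forward(self, layer_outputs: list) -> torch.Tensor:
        # layer_outputs: list of `group_size` tensors, each of shape (T, N, d_model).
        assert len(layer_outputs) == self.group_size
        gates = self.sigmoid(self.alpha)  # (group_size,), each gate in (0, 1)
        stacked = torch.stack(layer_outputs, dim=0)  # (group_size, T, N, d_model)
        combined = (gates.view(-1, 1, 1, 1) * stacked).sum(dim=0)  # (T, N, d_model)
        return self.layer_norm(combined)

Under this (assumed) reading, sigmoid(alpha) keeps each gate in (0, 1) so the contribution of every layer in the group stays bounded, and the trailing LayerNorm(512) matches the encoder dimension so the combined output can feed the next stage unchanged.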