diff --git a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp
index 0401582c7..05e41821a 100644
Binary files a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp and b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp differ
diff --git a/egs/aishell/ASR/conformer_ctc/transformer.py b/egs/aishell/ASR/conformer_ctc/transformer.py
index 58de71c90..d85f7d7d8 100644
--- a/egs/aishell/ASR/conformer_ctc/transformer.py
+++ b/egs/aishell/ASR/conformer_ctc/transformer.py
@@ -401,7 +401,13 @@ class TransfEncoder(nn.TransformerEncoder):
     __constants__ = ['norm']
 
     def __init__(self, encoder_layer, num_layers, norm=None, enable_nested_tensor=True, mask_check=True):
-        super(TransfEncoder, self).__init__()
+        super(TransfEncoder, self).__init__(
+            encoder_layer=encoder_layer,
+            num_layers=num_layers,
+            norm=norm,
+            enable_nested_tensor=enable_nested_tensor,
+            mask_check=mask_check
+        )
 
     def forward(self, src: torch.Tensor, mask: Optional[torch.Tensor] = None, src_key_padding_mask: Optional[torch.Tensor] = None):
         """Pass the input through the encoder layers in turn.