diff --git a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp
index b8c661804..c92d73f15 100644
Binary files a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp and b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp differ
diff --git a/egs/aishell/ASR/conformer_ctc/transformer.py b/egs/aishell/ASR/conformer_ctc/transformer.py
index f8d30e403..c723fdf65 100644
--- a/egs/aishell/ASR/conformer_ctc/transformer.py
+++ b/egs/aishell/ASR/conformer_ctc/transformer.py
@@ -490,9 +490,11 @@ class TransfEncoder(nn.TransformerEncoder):
             convert_to_nested = True
             output = torch._nested_tensor_from_mask(output, src_key_padding_mask.logical_not(), mask_check=False)
             src_key_padding_mask_for_layers = None
-
+
+        outputs = []
         for mod in self.layers:
             output = mod(output, src_mask=mask, src_key_padding_mask=src_key_padding_mask_for_layers)
+            outputs.append(output)
 
         if convert_to_nested:
             output = output.to_padded_tensor(0.)
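
The hunk above patches a copy of PyTorch's nn.TransformerEncoder.forward so that the output of every encoder layer is appended to an outputs list, not only the final layer's output; the diff is cut off before showing how that list is used or returned. The standalone sketch below illustrates the same technique under stated assumptions: the class name LayerCollectingEncoder and the tuple return value are illustrative, not the icefall API, and the nested-tensor fast path (torch._nested_tensor_from_mask / to_padded_tensor) that the patched forward keeps is omitted for brevity.

import torch
import torch.nn as nn


class LayerCollectingEncoder(nn.TransformerEncoder):
    """Illustrative subclass (not the icefall TransfEncoder) returning per-layer outputs."""

    def forward(self, src, mask=None, src_key_padding_mask=None):
        output = src
        outputs = []  # one entry per encoder layer, mirroring `outputs` in the diff
        for mod in self.layers:
            output = mod(
                output,
                src_mask=mask,
                src_key_padding_mask=src_key_padding_mask,
            )
            outputs.append(output)
        if self.norm is not None:
            output = self.norm(output)
        return output, outputs


# Usage: three layers yield three collected tensors plus the final output.
layer = nn.TransformerEncoderLayer(d_model=16, nhead=4, batch_first=True)
encoder = LayerCollectingEncoder(layer, num_layers=3)
x = torch.randn(2, 10, 16)  # (batch, time, d_model)
final, per_layer = encoder(x)
print(final.shape, len(per_layer))  # torch.Size([2, 10, 16]) 3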