diff --git a/egs/aishell/ASR/conformer_ctc/.train.py.swp b/egs/aishell/ASR/conformer_ctc/.train.py.swp
index 739d71baa..03b02834a 100644
Binary files a/egs/aishell/ASR/conformer_ctc/.train.py.swp and b/egs/aishell/ASR/conformer_ctc/.train.py.swp differ
diff --git a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp
index e757206c5..2eca2d523 100644
Binary files a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp and b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp differ
diff --git a/egs/aishell/ASR/conformer_ctc/train.py b/egs/aishell/ASR/conformer_ctc/train.py
index 823f603da..41cce35f9 100755
--- a/egs/aishell/ASR/conformer_ctc/train.py
+++ b/egs/aishell/ASR/conformer_ctc/train.py
@@ -588,6 +588,12 @@ def run(rank, world_size, args):
     )
 
     logging.info("About to create model")
+    model = Transformer(
+        num_features=params.feature_dim,
+        num_classes=num_classes,
+        use_feat_batchnorm=params.use_feat_batchnorm,
+    )
+    '''
     model = Conformer(
         num_features=params.feature_dim,
         nhead=params.nhead,
@@ -599,6 +605,7 @@
         vgg_frontend=False,
         use_feat_batchnorm=params.use_feat_batchnorm,
     )
+    '''
 
     checkpoints = load_checkpoint_if_available(params=params, model=model)
 