diff --git a/egs/librispeech/ASR/incremental_transf/.conformer.py.swp b/egs/librispeech/ASR/incremental_transf/.conformer.py.swp
index 0ab8cbcba..0175375e5 100644
Binary files a/egs/librispeech/ASR/incremental_transf/.conformer.py.swp and b/egs/librispeech/ASR/incremental_transf/.conformer.py.swp differ
diff --git a/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp b/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp
index 3fcaabe90..33277a7f7 100644
Binary files a/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp and b/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp differ
diff --git a/egs/librispeech/ASR/incremental_transf/identity_train.py b/egs/librispeech/ASR/incremental_transf/identity_train.py
index bc8f48dcf..8b722562f 100755
--- a/egs/librispeech/ASR/incremental_transf/identity_train.py
+++ b/egs/librispeech/ASR/incremental_transf/identity_train.py
@@ -982,7 +982,6 @@ def run(rank, world_size, args):
     transducer_model.load_state_dict(pre_trained_model, strict=True)
 
     model = get_interformer_model(transducer_model.encoder, params)
-    print(model)
     '''
     for n, p in model.named_parameters():
         if 'layer' not in n:
@@ -998,7 +997,6 @@ def run(rank, world_size, args):
         else:
             print(f'skipping param load {n}')
     '''
-    exit()
 
     num_param = sum([p.numel() for p in model.parameters()])
     logging.info(f"Number of model parameters: {num_param}")