diff --git a/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp b/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp
index 9c665afcd..d9ba1374c 100644
Binary files a/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp and b/egs/librispeech/ASR/incremental_transf/.identity_train.py.swp differ
diff --git a/egs/librispeech/ASR/incremental_transf/identity_train.py b/egs/librispeech/ASR/incremental_transf/identity_train.py
index 9209e751b..e5a81491b 100755
--- a/egs/librispeech/ASR/incremental_transf/identity_train.py
+++ b/egs/librispeech/ASR/incremental_transf/identity_train.py
@@ -966,10 +966,11 @@ def run(rank, world_size, args):
     logging.info("About to create model")
     model = get_transducer_model(params)
 
-    '''
-    pre_trained_model = torch.load('/home/work/workspace/icefall/egs/librispeech/ASR/pruned_transducer_stateless5/exp-B/epoch-30.pt')
+    pre_trained_model = torch.load('/home/work/workspace/icefall/egs/librispeech/ASR/incremental_trans/conformer_12layers.pt')
     pre_trained_model = pre_trained_model['model']
-
+    model.load_state_dict(pre_trained_model)
+
+    '''
     for n, p in model.named_parameters():
         if 'layer' not in n:
             try: p.data = pre_trained_model[n]
@@ -983,9 +984,8 @@ def run(rank, world_size, args):
             except: print(f'pre-trained model has no parameter named {n}.')
         else: print(f'skipping param load {n}')
 
-
-        exit()
     '''
+    exit()
 
     num_param = sum([p.numel() for p in model.parameters()])
     logging.info(f"Number of model parameters: {num_param}")
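
Note on the change above: the patch replaces the commented-out per-parameter copy loop with a plain model.load_state_dict() on the full conformer_12layers.pt checkpoint, and the loop (which copied only parameters whose names lack 'layer', assigning p.data inside a bare try/except) now sits entirely inside the triple-quoted block. Below is a minimal sketch of that same partial-initialization idea in idiomatic PyTorch, assuming an icefall-style checkpoint that stores weights under the 'model' key; the helper name load_pretrained_non_layer and its path argument are illustrative only, not part of the patch:

import torch

def load_pretrained_non_layer(model: torch.nn.Module, ckpt_path: str) -> None:
    # Load the checkpoint on CPU; icefall checkpoints keep weights under 'model'.
    state_dict = torch.load(ckpt_path, map_location="cpu")["model"]
    # Keep only the entries the loop would have copied, i.e. those whose
    # names do not contain 'layer'.
    filtered = {k: v for k, v in state_dict.items() if "layer" not in k}
    # strict=False tolerates keys that are absent on either side (it still
    # requires matching shapes for the keys it does load).
    result = model.load_state_dict(filtered, strict=False)
    # missing_keys covers both the deliberately skipped 'layer' parameters
    # and any parameter the checkpoint never had, replacing the loop's prints.
    for name in result.missing_keys:
        print(f"skipping param load {name}")
    # unexpected_keys are checkpoint entries with no counterpart in the model.
    for name in result.unexpected_keys:
        print(f"checkpoint key not found in model: {name}")

Compared with assigning p.data under a bare except, strict=False surfaces every skipped or absent key in its return value and fails loudly on shape mismatches, which makes a partially initialized model easier to audit.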