Mirror of https://github.com/k2-fsa/icefall.git (synced 2025-12-11 06:55:27 +00:00)
commit a05d12c637 (parent 32796f37c5)
Binary file not shown.
@@ -966,10 +966,11 @@ def run(rank, world_size, args):
    logging.info("About to create model")
    model = get_transducer_model(params)

    '''
    pre_trained_model = torch.load('/home/work/workspace/icefall/egs/librispeech/ASR/pruned_transducer_stateless5/exp-B/epoch-30.pt')
    pre_trained_model = torch.load('/home/work/workspace/icefall/egs/librispeech/ASR/incremental_trans/conformer_12layers.pt')
    pre_trained_model = pre_trained_model['model']

    model.load_state_dict(pre_trained_model)

    '''
    # copy only the parameters that are not inside encoder layers
    # from the pre-trained checkpoint into the freshly built model
    for n, p in model.named_parameters():
        if 'layer' not in n:
            try: p.data = pre_trained_model[n]
@@ -983,9 +984,8 @@ def run(rank, world_size, args):
            except: print(f'pre-trained model has no parameter named {n}.')
        else:
            print(f'skipping param load {n}')

    exit()
    '''
    exit()

    num_param = sum([p.numel() for p in model.parameters()])
    logging.info(f"Number of model parameters: {num_param}")
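
Note: below is a minimal, self-contained sketch of the selective-loading idea this diff implements. Only get_transducer_model, params, and the checkpoint layout (weights stored under the 'model' key) come from the diff above; the checkpoint path and the rest are illustrative assumptions, not icefall's actual code. It copies just the parameters whose names do not contain 'layer' and lets load_state_dict report what was skipped:

    import torch

    # hypothetical checkpoint path, following the paths used in the diff
    ckpt = torch.load('conformer_12layers.pt', map_location='cpu')
    state = ckpt['model']  # icefall checkpoints keep the weights under 'model'

    model = get_transducer_model(params)  # created earlier in run()

    # keep only the non-encoder-layer entries and load them non-strictly;
    # load_state_dict returns the keys it could not match on either side
    partial = {n: v for n, v in state.items() if 'layer' not in n}
    missing, unexpected = model.load_state_dict(partial, strict=False)
    print(f'copied {len(partial)} tensors; '
          f'{len(missing)} missing, {len(unexpected)} unexpected')

A plain p.data assignment silently accepts a wrong-shaped tensor, whereas load_state_dict raises on shape mismatches even with strict=False, so skipped and mismatched keys stay visible instead of being swallowed by a bare except.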