from local

This commit is contained in:
dohe0342 2023-01-10 01:04:23 +09:00
parent 86df081ca0
commit bbd50fda4c
2 changed files with 13 additions and 8 deletions

View File

@@ -966,14 +966,19 @@ def run(rank, world_size, args):
     logging.info("About to create model")
     model = get_transducer_model(params)
-    path = '/home/work/workspace/icefall/egs/librispeech/ASR/incremental_transf/conformer_24layers.pt'
-    pre_trained = torch.load(path)
-    pre_trained_model = pre_trained["model"]
-    #for n, p in model.named_parameters():
-    #    if 'layer' not in n:
-    for n, p in pre_trained_model.items():
-        print(n)
-    exit()
+    path1 = '/home/work/workspace/icefall/egs/librispeech/ASR/incremental_transf/conformer_12layers.pt'
+    pre_trained1 = torch.load(path1)
+    pre_trained_model1 = pre_trained1["model"]
+
+    path2 = '/home/work/workspace/icefall/egs/librispeech/ASR/incremental_transf/conformer_24layers.pt'
+    pre_trained2 = torch.load(path2)
+    pre_trained_model2 = pre_trained2["model"]
+
+    for n, p in model.named_parameters():
+        if 'layer' not in n:
+            layer_name_splited = n.split('.')
     num_param = sum([p.numel() for p in model.parameters()])
     logging.info(f"Number of model parameters: {num_param}")