from local
This commit is contained in: parent 86df081ca0, commit bbd50fda4c
Binary file not shown.
@@ -966,14 +966,19 @@ def run(rank, world_size, args):
    logging.info("About to create model")
    model = get_transducer_model(params)

    # Load a pre-trained 24-layer Conformer checkpoint and print its
    # parameter names; exit() stops the run right after (debug aid).
    path = '/home/work/workspace/icefall/egs/librispeech/ASR/incremental_transf/conformer_24layers.pt'
    pre_trained = torch.load(path)
    pre_trained_model = pre_trained["model"]

    # for n, p in model.named_parameters():
    #     if 'layer' not in n:
    for n, p in pre_trained_model.items():
        print(n)
    exit()

    # Load the 12-layer and 24-layer checkpoints side by side.
    path1 = '/home/work/workspace/icefall/egs/librispeech/ASR/incremental_transf/conformer_12layers.pt'
    pre_trained1 = torch.load(path1)
    pre_trained_model1 = pre_trained1["model"]

    path2 = '/home/work/workspace/icefall/egs/librispeech/ASR/incremental_transf/conformer_24layers.pt'
    pre_trained2 = torch.load(path2)
    pre_trained_model2 = pre_trained2["model"]

    # Walk the freshly built model's parameters that are not part of an
    # encoder layer and split their dotted names into components.
    for n, p in model.named_parameters():
        if 'layer' not in n:
            layer_name_splited = n.split('.')

    num_param = sum([p.numel() for p in model.parameters()])
    logging.info(f"Number of model parameters: {num_param}")
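The hunk above only prints parameter names before calling exit(); loading the 12-layer and 24-layer checkpoints side by side suggests the eventual goal is to initialize the larger model from the smaller one. A minimal sketch of that kind of selective state-dict copy, assuming each checkpoint stores its weights under a "model" key as in the diff; load_matching_params is a hypothetical helper, not part of this commit:

    import torch

    def load_matching_params(model, ckpt_path):
        # Hypothetical helper, not part of this commit: copy every
        # checkpoint tensor whose name and shape match a parameter of
        # `model`, leaving the rest at their fresh initialization.
        state = torch.load(ckpt_path, map_location="cpu")["model"]
        own = model.state_dict()
        matched = {
            n: p for n, p in state.items()
            if n in own and own[n].shape == p.shape
        }
        own.update(matched)
        model.load_state_dict(own)
        return sorted(matched)  # names that were actually copied

With an explicit layer-index remapping (e.g. mapping layer i of a 12-layer encoder onto deeper positions of the 24-layer one) this would extend to the incremental-depth case, but that mapping is a design choice the commit does not yet pin down.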