Mirror of https://github.com/k2-fsa/icefall.git
fix a bug with model_avg during fine-tuning: swap the order of loading the pre-trained model and initializing the averaged model
parent 807816fec0
commit 23375cb64f
@@ -1077,12 +1077,6 @@ def run(rank, world_size, args):
     num_param = sum([p.numel() for p in model.parameters()])
     logging.info(f"Number of model parameters: {num_param}")
 
-    assert params.save_every_n >= params.average_period
-    model_avg: Optional[nn.Module] = None
-    if rank == 0:
-        # model_avg is only used with rank 0
-        model_avg = copy.deepcopy(model).to(torch.float64)
-
     # load model parameters for model fine-tuning
     if params.do_finetune:
         modules = params.init_modules.split(",") if params.init_modules else None
@@ -1095,6 +1089,12 @@ def run(rank, world_size, args):
         params=params, model=model, model_avg=model_avg
     )
 
+    assert params.save_every_n >= params.average_period
+    model_avg: Optional[nn.Module] = None
+    if rank == 0:
+        # model_avg is only used with rank 0
+        model_avg = copy.deepcopy(model).to(torch.float64)
+
     model.to(device)
     if world_size > 1:
         logging.info("Using DDP")
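Why the order matters: model_avg is seeded with a deep copy of model, so taking the copy before the pre-trained checkpoint is loaded means the averaged model starts from random initialization and every periodic average blends random weights into the fine-tuned ones. Below is a minimal sketch of the corrected flow; the function name, params.finetune_ckpt, and the checkpoint layout are illustrative assumptions (the recipe itself uses load_model_params with init_modules), while do_finetune, save_every_n, and average_period come from the diff above.

    import copy
    from typing import Optional

    import torch
    import torch.nn as nn


    def init_model_avg_after_loading(
        params, model: nn.Module, rank: int
    ) -> Optional[nn.Module]:
        """Sketch of the corrected ordering used by this commit."""
        # 1) Load the pre-trained weights for fine-tuning first.
        #    (Simplified stand-in for the recipe's load_model_params;
        #    the "model" key and finetune_ckpt attribute are assumptions.)
        if params.do_finetune:
            state = torch.load(params.finetune_ckpt, map_location="cpu")
            model.load_state_dict(state["model"], strict=False)

        # 2) Only now seed the averaged model, so the running average starts
        #    from the fine-tuning initialization instead of random weights.
        model_avg: Optional[nn.Module] = None
        if rank == 0:
            assert params.save_every_n >= params.average_period
            model_avg = copy.deepcopy(model).to(torch.float64)
        return model_avg

With the old ordering, model_avg was a copy of the randomly initialized model, so the averaged checkpoints written during fine-tuning were polluted by untrained parameters; moving the deep copy after the checkpoint load makes the average track the fine-tuned model from the start.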