Bug fix in train.py, fix optimizer name

This commit is contained in:
Daniel Povey 2022-09-16 14:10:42 +08:00
parent 257c961b66
commit 3b450c2682


@@ -66,7 +66,7 @@ from lhotse.cut import Cut
 from lhotse.dataset.sampling.base import CutSampler
 from lhotse.utils import fix_random_seed
 from model import Transducer
-from optim import Eden, PrAdam
+from optim import Eden, ScaledAdam
 from torch import Tensor
 from torch.cuda.amp import GradScaler
 from torch.nn.parallel import DistributedDataParallel as DDP
@@ -926,10 +926,8 @@ def run(rank, world_size, args):
         logging.info("Using DDP")
         model = DDP(model, device_ids=[rank])
-    optimizer = PrAdam(model.parameters(),
-                       lr=params.initial_lr,
-                       max_block_size=512,
-                       lr_update_period=(400, 5000))
+    optimizer = ScaledAdam(model.parameters(),
+                           lr=params.initial_lr)
     scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)
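
For orientation, below is a minimal sketch of how the renamed ScaledAdam optimizer and the Eden scheduler from the recipe's optim.py fit into a training step. The model, params values, dataloader, loss helper, and the per-batch/per-epoch scheduler-stepping calls are illustrative assumptions, not part of this commit.

    # A minimal sketch, assuming the icefall recipe directory (with optim.py) is on the path.
    # `model`, `params`, `train_dl`, and `compute_loss` are hypothetical stand-ins.
    import torch
    from optim import Eden, ScaledAdam

    model = torch.nn.Linear(80, 500)  # stand-in for the real Transducer model
    params = type("Params", (), {"initial_lr": 0.05,   # illustrative values only
                                 "lr_batches": 5000,
                                 "lr_epochs": 3.5})()

    optimizer = ScaledAdam(model.parameters(), lr=params.initial_lr)
    scheduler = Eden(optimizer, params.lr_batches, params.lr_epochs)

    for epoch in range(1, 3):
        for batch_idx, batch in enumerate(train_dl):   # train_dl is an assumed dataloader
            loss = compute_loss(model, batch)          # assumed loss helper
            optimizer.zero_grad()
            loss.backward()
            scheduler.step_batch(batch_idx)  # per-batch LR update (assumed Eden method)
            optimizer.step()
        scheduler.step_epoch(epoch)          # per-epoch LR update (assumed Eden method)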