Mirror of https://github.com/k2-fsa/icefall.git (synced 2025-12-09 14:05:33 +00:00)
Use a different seed for each epoch.
commit 407e8aeff7
parent 791f54c8c2
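The commit makes the same one-line change in each recipe's run() function: instead of resetting the RNGs to params.seed at the top of every epoch, it offsets the seed by the epoch index, so every epoch sees different (but still reproducible) randomness. Below is a minimal sketch of the pattern, not the actual icefall code: the local fix_random_seed is a simplified stand-in for the helper the recipes import (the real one comes from lhotse), and the loop body is elided.

import random

import numpy as np
import torch


def fix_random_seed(seed: int) -> None:
    # Simplified stand-in for the helper used by the recipes
    # (the real one is provided by lhotse); seed the usual RNGs.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)


def run(params, train_dl):
    for epoch in range(params.start_epoch, params.num_epochs):
        # Before this commit: fix_random_seed(params.seed) restored the
        # same RNG state at the start of every epoch, so augmentation,
        # dropout, etc. repeated identically from epoch to epoch.
        # After: offsetting by the epoch index keeps a run reproducible
        # for a fixed params.seed while varying the randomness per epoch.
        fix_random_seed(params.seed + epoch)

        # Unchanged by the commit: the sampler is told the epoch
        # separately so it reshuffles the training cuts.
        train_dl.sampler.set_epoch(epoch)

        # ... per-epoch training body (elided) ...

Run-to-run reproducibility is preserved because the per-epoch seed is still a deterministic function of params.seed; only the epoch-to-epoch repetition of the random state goes away.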
@@ -625,7 +625,7 @@ def run(rank, world_size, args):
     valid_dl = aishell.valid_dataloaders(aishell.valid_cuts())
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -620,7 +620,7 @@ def run(rank, world_size, args):
     valid_dl = aishell.valid_dataloaders(aishell.valid_cuts())
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -564,7 +564,7 @@ def run(rank, world_size, args):
     valid_dl = aishell.valid_dataloaders(aishell.valid_cuts())
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         if epoch > params.start_epoch:

@@ -618,7 +618,7 @@ def run(rank, world_size, args):
     valid_dl = aishell.valid_dataloaders(aishell.valid_cuts())
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -691,7 +691,7 @@ def run(rank, world_size, args):
     )
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -768,7 +768,7 @@ def run(rank, world_size, args):
     valid_dl = librispeech.valid_dataloaders()
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
         if (
             params.batch_idx_train >= params.use_ali_until

@@ -738,7 +738,7 @@ def run(rank, world_size, args):
     )
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -652,7 +652,7 @@ def run(rank, world_size, args):
     )
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -551,7 +551,7 @@ def run(rank, world_size, args):
     valid_dl = librispeech.valid_dataloaders(valid_cuts)
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         if epoch > params.start_epoch:

@@ -640,7 +640,7 @@ def run(rank, world_size, args):
     )
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -646,7 +646,7 @@ def run(rank, world_size, args):
     )
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -658,7 +658,7 @@ def run(rank, world_size, args):
     )
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         cur_lr = optimizer._rate

@@ -543,7 +543,7 @@ def run(rank, world_size, args):
     valid_dl = timit.valid_dataloaders()
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         if epoch > params.start_epoch:

@@ -543,7 +543,7 @@ def run(rank, world_size, args):
     valid_dl = timit.valid_dataloaders()
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         if epoch > params.start_epoch:

@@ -527,7 +527,7 @@ def run(rank, world_size, args):
     valid_dl = yes_no.test_dataloaders()
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         if tb_writer is not None:

@@ -539,7 +539,7 @@ def run(rank, world_size, args):
     valid_dl = yes_no.test_dataloaders()
 
     for epoch in range(params.start_epoch, params.num_epochs):
-        fix_random_seed(params.seed)
+        fix_random_seed(params.seed + epoch)
         train_dl.sampler.set_epoch(epoch)
 
         if tb_writer is not None:

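All of the hunks above make the same substitution; the recipes differ only in which dataset module builds the dataloaders (aishell, librispeech, timit, yes_no) and in what follows the re-seeding inside the epoch loop.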