add cosyvoice scheduler

root 2025-01-09 07:53:28 +00:00
parent 84f6e0da4e
commit 3f8e64b9dc
2 changed files with 11 additions and 7 deletions


@@ -290,6 +290,8 @@ class CFM(nn.Module):
         # time step
         time = torch.rand((batch,), dtype=dtype, device=self.device)
+        # add cosyvoice cosine scheduler
+        time = 1 - torch.cos(time * 0.5 * torch.pi)
         # TODO. noise_scheduler
         # sample xt (φ_t(x) in the paper)
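
For context: the added warp maps uniform samples t in [0, 1) through 1 - cos(pi*t/2). This keeps t in [0, 1) but shifts its mean from 0.5 down to 1 - 2/pi ≈ 0.36, so flow-matching training draws small time steps more often. A minimal standalone sketch of the same mapping (assumes only PyTorch; the batch size is illustrative, not from the commit):

    import torch

    batch = 4  # illustrative batch size, not from the commit
    time = torch.rand((batch,))                   # t ~ U[0, 1)
    time = 1 - torch.cos(time * 0.5 * torch.pi)   # cosine warp from the hunk above

    # The warp stays in [0, 1) but biases samples toward 0:
    # E[1 - cos(pi*t/2)] = 1 - 2/pi ≈ 0.363 for t ~ U[0, 1).
    print(time.min().item(), time.max().item(), time.mean().item())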


@@ -1007,13 +1007,15 @@ def run(rank, world_size, args):
         )
     else:
         raise NotImplementedError()
     # scheduler = Eden(optimizer, 5000, 4, warmup_batches=params.warmup_steps)
-    warmup_scheduler = LinearLR(optimizer, start_factor=1e-8, end_factor=1.0, total_iters=params.warmup_steps)
-    decay_scheduler = LinearLR(optimizer, start_factor=1.0, end_factor=1e-8, total_iters=params.decay_steps)
-    scheduler = SequentialLR(
-        optimizer, schedulers=[warmup_scheduler, decay_scheduler], milestones=[params.warmup_steps]
-    )
+    if params.decay_steps:
+        warmup_scheduler = LinearLR(optimizer, start_factor=1e-8, end_factor=1.0, total_iters=params.warmup_steps)
+        decay_scheduler = LinearLR(optimizer, start_factor=1.0, end_factor=1e-8, total_iters=params.decay_steps)
+        scheduler = SequentialLR(
+            optimizer, schedulers=[warmup_scheduler, decay_scheduler], milestones=[params.warmup_steps]
+        )
+        assert 1==2
+    else:
+        scheduler = Eden(optimizer, 50000, 10, warmup_batches=params.warmup_steps)
     optimizer.zero_grad()
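
For context: with params.decay_steps set, the new branch ramps the learning rate linearly from near zero up to the base LR over params.warmup_steps, then decays it linearly back toward zero over params.decay_steps; otherwise it falls back to the Eden scheduler. (The `assert 1==2` left in the decay branch will halt training the moment that branch runs, so it reads like a temporary debug stop.) A standalone sketch of the warmup-then-decay schedule (assumes only PyTorch; the model, base LR, and step counts are illustrative, not from the commit):

    import torch
    from torch.optim.lr_scheduler import LinearLR, SequentialLR

    model = torch.nn.Linear(4, 4)                      # toy stand-in for the real model
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

    warmup_steps, decay_steps = 5, 10                  # illustrative step counts
    warmup = LinearLR(optimizer, start_factor=1e-8, end_factor=1.0, total_iters=warmup_steps)
    decay = LinearLR(optimizer, start_factor=1.0, end_factor=1e-8, total_iters=decay_steps)
    scheduler = SequentialLR(optimizer, schedulers=[warmup, decay], milestones=[warmup_steps])

    for step in range(warmup_steps + decay_steps):
        optimizer.step()                               # LR rises to 1e-3 over warmup, then falls toward 0
        scheduler.step()
        print(step, scheduler.get_last_lr()[0])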