From caa1d41b221f45d5709fad3773be08b1676a466d Mon Sep 17 00:00:00 2001
From: JinZr
Date: Mon, 21 Oct 2024 17:17:59 +0800
Subject: [PATCH] minor updates to the TTS & CODEC recipes

---
 egs/libritts/CODEC/encodec/train.py | 18 +++++++++---------
 egs/libritts/TTS/vits/train.py      |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/egs/libritts/CODEC/encodec/train.py b/egs/libritts/CODEC/encodec/train.py
index bf231c5b6..a4f2eb7ab 100755
--- a/egs/libritts/CODEC/encodec/train.py
+++ b/egs/libritts/CODEC/encodec/train.py
@@ -138,7 +138,7 @@ def get_parser():
     parser.add_argument(
         "--save-every-n",
         type=int,
-        default=1,
+        default=5,
         help="""Save checkpoint after processing this number of epochs"
         periodically. We save checkpoint to exp-dir/ whenever
         params.cur_epoch % save_every_n == 0. The checkpoint filename
@@ -1093,14 +1093,14 @@ def run(rank, world_size, args):
         rank=rank,
     )

-    # if not params.print_diagnostics:
-    #     scan_pessimistic_batches_for_oom(
-    #         model=model,
-    #         train_dl=train_dl,
-    #         optimizer_g=optimizer_g,
-    #         optimizer_d=optimizer_d,
-    #         params=params,
-    #     )
+    if not params.print_diagnostics:
+        scan_pessimistic_batches_for_oom(
+            model=model,
+            train_dl=train_dl,
+            optimizer_g=optimizer_g,
+            optimizer_d=optimizer_d,
+            params=params,
+        )

     scaler = GradScaler(enabled=params.use_fp16, init_scale=1.0)
     if checkpoints and "grad_scaler" in checkpoints:
diff --git a/egs/libritts/TTS/vits/train.py b/egs/libritts/TTS/vits/train.py
index 1d2870ed8..d89de9608 100755
--- a/egs/libritts/TTS/vits/train.py
+++ b/egs/libritts/TTS/vits/train.py
@@ -212,7 +212,7 @@ def get_params() -> AttributeDict:
             "log_interval": 50,
             "valid_interval": 200,
             "env_info": get_env_info(),
-            "sampling_rate": 22050,
+            "sampling_rate": 24000,
             "frame_shift": 256,
             "frame_length": 1024,
             "feature_dim": 513,  # 1024 // 2 + 1, 1024 is fft_length
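
With the new --save-every-n default of 5, encodec checkpoints are written
only when params.cur_epoch % save_every_n == 0 (per the option's help text
above), rather than after every epoch. A quick way to see the cadence:

    # Epochs that trigger a checkpoint under the new default (save_every_n = 5),
    # following the modulo rule quoted in the help text.
    save_every_n = 5
    print([epoch for epoch in range(1, 31) if epoch % save_every_n == 0])
    # -> [5, 10, 15, 20, 25, 30]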
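
The re-enabled scan_pessimistic_batches_for_oom call probes the worst-case
(largest) batches for out-of-memory failures before the first real epoch. A
minimal sketch of the general idea, with illustrative names rather than the
recipe's actual code:

    import torch
    import torch.nn as nn

    def scan_for_oom(model, batches, optimizer):
        # Run forward/backward on the most memory-hungry batches up front,
        # so a CUDA OOM surfaces immediately rather than hours into training.
        for batch in batches:
            optimizer.zero_grad()
            model(batch).sum().backward()
        optimizer.zero_grad()  # discard the probe gradients

    model = nn.Linear(16, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    scan_for_oom(model, [torch.randn(512, 16)], optimizer)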
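
The vits change fixes a config mismatch: LibriTTS audio is recorded at
24 kHz, so sampling_rate must be 24000 rather than the 22050 Hz value
commonly used for LJSpeech-style corpora. A small sanity check against an
actual utterance (the wav path below is a placeholder):

    import torchaudio

    # Placeholder path; substitute any LibriTTS wav.
    info = torchaudio.info("path/to/LibriTTS/utt.wav")
    assert info.sample_rate == 24000, f"unexpected rate: {info.sample_rate}"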