From 573e0582d8319c5b23044fe3c5f6ebef8c7f8557 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Mon, 30 Aug 2021 14:10:21 +0800
Subject: [PATCH] Run in exp_2, with foam from start, knee_factor=5.0,
 initial_lrate=2e-04.

---
 egs/librispeech/ASR/conformer_lm/madam.py | 2 +-
 egs/librispeech/ASR/conformer_lm/train.py | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/egs/librispeech/ASR/conformer_lm/madam.py b/egs/librispeech/ASR/conformer_lm/madam.py
index 07266a63b..36716efec 100644
--- a/egs/librispeech/ASR/conformer_lm/madam.py
+++ b/egs/librispeech/ASR/conformer_lm/madam.py
@@ -852,7 +852,7 @@ class Foam(object):
                  params,
                  max_lrate: float = 5.0e-04,
                  warm_step: int = 25000,
-                 knee_factor: float = 8.0,
+                 knee_factor: float = 5.0,
                  min_target_rms: float = 0.05,
                  limit_grad_factor: float = float('inf'),
                  l2_period: int = 1) -> None:
diff --git a/egs/librispeech/ASR/conformer_lm/train.py b/egs/librispeech/ASR/conformer_lm/train.py
index 5ca267147..4c0219eb1 100755
--- a/egs/librispeech/ASR/conformer_lm/train.py
+++ b/egs/librispeech/ASR/conformer_lm/train.py
@@ -132,13 +132,13 @@ def get_params() -> AttributeDict:
     """
     params = AttributeDict(
         {
-            "exp_dir": Path("conformer_lm/exp_1"),
+            "exp_dir": Path("conformer_lm/exp_2"),
             "lm_dataset": Path("data/lm_training_5000/lm_data.pt"),
             "num_tokens": 5000,
             "blank_sym": 0,
             "bos_sym": 1,
             "eos_sym": 1,
-            "start_epoch": 3,
+            "start_epoch": 0,
             "num_epochs": 20,
             "num_valid_batches": 200,
             "symbols_per_batch": 5000,
@@ -155,7 +155,7 @@ def get_params() -> AttributeDict:
             "attention_dim": 512,
             "nhead": 8,
             "num_decoder_layers": 6,
-            "max_lrate": 5.0e-04
+            "max_lrate": 2.0e-04 # was 5.0e-04, then from start_epoch=9 used max_lrate=2.0e-04, then from start_epoch=11 used 1.0e-04.
         }
     )
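
Note: below is a minimal sketch (not code from this repository) of how the
values changed above could fit together, assuming the training script builds
a Foam optimizer from the values in get_params(). The import path, the
`model` argument and the helper name `make_optimizer` are assumptions for
illustration only; the actual call site in train.py may differ.

    from pathlib import Path
    from madam import Foam  # assumed import; madam.py sits next to train.py

    # Values as set by this patch (see the diff above).
    params = {
        "exp_dir": Path("conformer_lm/exp_2"),  # fresh experiment directory
        "start_epoch": 0,                       # "foam from start": no checkpoint resume
        "max_lrate": 2.0e-04,                   # lowered from 5.0e-04
    }

    def make_optimizer(model, params):
        # knee_factor=5.0 is now the Foam default (changed from 8.0), so
        # passing it explicitly is only for clarity; warm_step keeps its
        # default of 25000 from madam.py.
        return Foam(model.parameters(),
                    max_lrate=params["max_lrate"],
                    warm_step=25000,
                    knee_factor=5.0)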