Update params in train.py

Guanbo Wang 2022-04-06 19:06:22 -04:00
parent 79211633ed
commit 64bb39bf17


@@ -117,7 +117,7 @@ def get_parser():
     parser.add_argument(
         "--att-rate",
         type=float,
-        default=0.8,
+        default=0.7,
         help="""The attention rate.
         The total loss is (1 - att_rate) * ctc_loss + att_rate * att_loss
         """,
@@ -201,9 +201,9 @@ def get_params() -> AttributeDict:
             "best_train_epoch": -1,
             "best_valid_epoch": -1,
             "batch_idx_train": 0,
-            "log_interval": 50,
-            "reset_interval": 200,
-            "valid_interval": 3000,
+            "log_interval": 500,
+            "reset_interval": 2000,
+            "valid_interval": 30000,
             # parameters for conformer
             "feature_dim": 80,
             "subsampling_factor": 4,
@@ -217,7 +217,7 @@ def get_params() -> AttributeDict:
             "use_double_scores": True,
             # parameters for Noam
             "weight_decay": 1e-6,
-            "warm_step": 80000,
+            "warm_step": 100000,
             "env_info": get_env_info(),
         }
     )
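
Note on the --att-rate change: it only shifts the weighting between the CTC loss and the attention-decoder loss, exactly as the help string describes. A minimal sketch of that interpolation follows; the loss values and variable names are placeholders for illustration, not the actual ones computed elsewhere in train.py.

    # Sketch of how att_rate weights the two losses, per the help string:
    #   total = (1 - att_rate) * ctc_loss + att_rate * att_loss
    import torch

    att_rate = 0.7  # new default from this commit (was 0.8)

    ctc_loss = torch.tensor(2.5)  # placeholder CTC loss value
    att_loss = torch.tensor(1.8)  # placeholder attention-decoder loss value

    total_loss = (1 - att_rate) * ctc_loss + att_rate * att_loss
    print(total_loss)  # 0.3 * 2.5 + 0.7 * 1.8 = 2.01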
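Note on the warm_step change: it lengthens the warmup phase of the Noam learning-rate schedule that these parameters feed. The sketch below assumes the optimizer follows the standard Transformer ("Attention Is All You Need") Noam formula; the model_size and factor values are illustrative only.

    # Standard Noam schedule (assumed here, not copied from the repo):
    #   lr = factor * model_size**-0.5 * min(step**-0.5, step * warm_step**-1.5)
    # Raising warm_step from 80000 to 100000 stretches the warmup, which lowers
    # the peak learning rate and reaches it later.
    def noam_lr(step: int, model_size: int = 512, factor: float = 10.0,
                warm_step: int = 100000) -> float:
        step = max(step, 1)
        return factor * model_size ** -0.5 * min(step ** -0.5,
                                                 step * warm_step ** -1.5)

    print(noam_lr(50000, warm_step=80000))   # mid-warmup LR under the old setting
    print(noam_lr(50000, warm_step=100000))  # lower LR with the longer warmup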