Config changes, bug fix

Daniel Povey 2021-09-20 13:39:25 +08:00
parent 2bad68a8ed
commit ed84795b47
2 changed files with 4 additions and 4 deletions

@@ -945,7 +945,7 @@ class SampleAndPredict(nn.Module):
         if reverse_grad:
             tot_prob = reverse_gradient(tot_prob)
         return tot_prob
 class ConformerEncoderLayer(nn.Module):
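
For context, reverse_gradient() in the hunk above is the usual gradient-reversal trick: identity in the forward pass, sign-flipped gradient in the backward pass, so the loss term on tot_prob pushes the upstream parameters in the opposite direction. A minimal PyTorch sketch of such a helper, as an assumption about its behavior rather than the project's actual definition:

import torch

class _ReverseGrad(torch.autograd.Function):
    # Identity on the way forward; negates the gradient on the way back.
    @staticmethod
    def forward(ctx, x):
        return x

    @staticmethod
    def backward(ctx, grad_output):
        return -grad_output

def reverse_gradient(x: torch.Tensor) -> torch.Tensor:
    # Hypothetical stand-in sharing the name of the helper called above.
    return _ReverseGrad.apply(x)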


@@ -155,7 +155,7 @@ def get_params() -> AttributeDict:
     """
     params = AttributeDict(
         {
-            "exp_dir": Path("conformer_ctc_bn/exp_gloam_5e-4_0.85_discrete8"),
+            "exp_dir": Path("conformer_ctc_bn_2d/exp_bidirectional_1"),
             "lang_dir": Path("data/lang_bpe"),
             "feature_dim": 80,
             "subsampling_factor": 4,  # can't be changed
@@ -171,8 +171,8 @@ def get_params() -> AttributeDict:
             "reduction": "sum",
             "use_double_scores": True,
             "accum_grad": 1,
-            "att_scale": 0.4,
-            "reverse_att_scale": 0.4,  # ctc_scale == 1.0 - att_scale - reverse_att_scale
+            "att_scale": 0.7,
+            "reverse_att_scale": 0.01,  # ctc_scale == 1.0 - att_scale - reverse_att_scale
             "attention_dim": 512,
             "nhead": 8,
             "num_trunk_encoder_layers": 12,