Reduce encoder-dim and num-heads of center stack.

Daniel Povey 2023-05-29 17:32:49 +08:00
parent cdd9cf695f
commit cd36d149df


@@ -136,14 +136,14 @@ def add_model_arguments(parser: argparse.ArgumentParser):
     parser.add_argument(
         "--num-heads",
         type=str,
-        default="4,4,8,16,32,16,8,4,4",
+        default="4,4,8,16,16,16,8,4,4",
         help="Number of attention heads in the subformer encoder layers: a single int or comma-separated list.",
     )
     parser.add_argument(
         "--encoder-dim",
         type=str,
-        default="256,384,512,768,1024,768,512,384,256",
+        default="256,384,512,768,768,768,512,384,256",
         help="Embedding dimension in encoder stacks: a single int or comma-separated list.",
     )
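For reference, these comma-separated defaults are expanded into one value per encoder stack before the model is built. The sketch below is a minimal illustration of how the new center-stack values line up; the helper name _to_int_tuple is an assumption for illustration and is not part of this diff.

# Minimal sketch (not from this commit): expanding the comma-separated
# defaults into per-stack values; the helper name is hypothetical.
def _to_int_tuple(s: str) -> tuple:
    """Split '4,4,8,16,16,16,8,4,4' into (4, 4, 8, 16, 16, 16, 8, 4, 4)."""
    return tuple(int(x) for x in s.split(","))

num_heads = _to_int_tuple("4,4,8,16,16,16,8,4,4")                    # center stack: 32 -> 16 heads
encoder_dim = _to_int_tuple("256,384,512,768,768,768,512,384,256")   # center stack: 1024 -> 768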