Cosmetic changes

Daniel Povey 2022-10-18 12:49:14 +08:00
parent b988bc0e33
commit b37564c9c9
2 changed files with 2 additions and 2 deletions


@@ -1085,7 +1085,6 @@ class RelPositionMultiheadAttention(nn.Module):
 q = q.permute(1, 2, 0, 3)  # (batch, head, time1, head_dim)
 p = p.permute(1, 2, 0, 3)  # (batch, head, time1, head_dim // 2)
-# compute attention score
 k = k.permute(1, 2, 3, 0)  # (batch, head, d_k, time2)
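
For context, a minimal sketch (not the icefall code itself; the sizes and variable names are illustrative) of why q and k are permuted this way: the trailing dimensions line up so a single batched matmul yields the (batch, head, time1, time2) attention-score matrix that the removed comment referred to.

import torch

# Illustrative sizes; in the real module these come from the input tensors.
batch, head, time1, time2, head_dim = 2, 4, 10, 10, 16

q = torch.randn(time1, batch, head, head_dim)  # (time, batch, head, dim) layout before permute
k = torch.randn(time2, batch, head, head_dim)

q = q.permute(1, 2, 0, 3)  # (batch, head, time1, head_dim)
k = k.permute(1, 2, 3, 0)  # (batch, head, head_dim, time2)

# Batched matmul over the last two dims produces the attention scores.
attn_scores = torch.matmul(q, k)  # (batch, head, time1, time2)
assert attn_scores.shape == (batch, head, time1, time2)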


@@ -121,7 +121,8 @@ def add_model_arguments(parser: argparse.ArgumentParser):
 "--attention-dims",
 type=str,
 default="192,192",
-help="Attention dimension in the 2 blocks of conformer encoder layers, comma separated"
+help="""Attention dimension in the 2 blocks of conformer encoder layers, comma separated;
+not the same as embedding dimension."""
 )
 parser.add_argument(
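
As a hedged usage sketch (the code that consumes this option is not part of this diff, so the parsing shown is an assumption): a comma-separated --attention-dims string such as "192,192" would typically be split into one integer per block of conformer encoder layers.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--attention-dims",
    type=str,
    default="192,192",
    help="""Attention dimension in the 2 blocks of conformer encoder layers, comma separated;
    not the same as embedding dimension.""",
)
args = parser.parse_args(["--attention-dims", "256,192"])

# Hypothetical consumer: one attention dimension per encoder block.
attention_dims = tuple(int(x) for x in args.attention_dims.split(","))
print(attention_dims)  # (256, 192)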