Mirror of https://github.com/k2-fsa/icefall.git
Commit b37564c9c9 (parent b988bc0e33): Cosmetic changes
@@ -1085,7 +1085,6 @@ class RelPositionMultiheadAttention(nn.Module):
         q = q.permute(1, 2, 0, 3)  # (batch, head, time1, head_dim)
         p = p.permute(1, 2, 0, 3)  # (batch, head, time1, head_dim // 2)
-        # compute attention score
         k = k.permute(1, 2, 3, 0)  # (batch, head, d_k, time2)
 
 
 
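For readers skimming the hunk above: a minimal, runnable sketch of how the permuted q and k combine into an attention-score matrix. Shapes and names follow the comments in the diff; the concrete sizes are assumptions for illustration, and this is not the icefall implementation itself.

import torch

# Hypothetical sizes; the layout matches the comments in the hunk above.
batch, head, time1, time2, head_dim = 2, 4, 10, 10, 16

q = torch.randn(batch, head, time1, head_dim)  # (batch, head, time1, head_dim)
k = torch.randn(batch, head, head_dim, time2)  # (batch, head, d_k, time2)

# Content-based attention term: each query position attends to every key.
# (batch, head, time1, head_dim) @ (batch, head, head_dim, time2)
#   -> (batch, head, time1, time2)
scores = torch.matmul(q, k)
print(scores.shape)  # torch.Size([2, 4, 10, 10])

Permuting k to put head_dim before time2 is what lets a single batched matmul produce the (time1, time2) score matrix per head.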
@@ -121,7 +121,8 @@ def add_model_arguments(parser: argparse.ArgumentParser):
         "--attention-dims",
         type=str,
         default="192,192",
-        help="Attention dimension in the 2 blocks of conformer encoder layers, comma separated"
+        help="""Attention dimension in the 2 blocks of conformer encoder layers, comma separated;
+        not the same as embedding dimension."""
     )
 
     parser.add_argument(
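As a usage note, a minimal self-contained sketch of how a comma-separated --attention-dims value is typically turned into one integer per encoder block. The split-and-int parsing line is an assumption for illustration, not code from this commit.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--attention-dims",
    type=str,
    default="192,192",
    help="""Attention dimension in the 2 blocks of conformer encoder layers,
    comma separated; not the same as embedding dimension.""",
)
args = parser.parse_args([])  # empty argv -> use the default value

# Split the comma-separated string into one int per encoder block.
attention_dims = [int(d) for d in args.attention_dims.split(",")]
print(attention_dims)  # [192, 192]

Keeping the flag as a string (rather than nargs) lets the default stay a single compact token on the command line, e.g. --attention-dims 256,256.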