Have value dim in NonlinAttentionModule be half of num_channels

Daniel Povey 2022-11-28 21:55:06 +08:00
parent 258d4f1353
commit 36a2f33a6f


@@ -1464,15 +1464,15 @@ class NonlinAttentionModule(nn.Module):
         super().__init__()
         self.ratio = ratio
-        assert channels % ratio == 0
-        self.in_proj = nn.Linear(channels, channels + channels // ratio, bias=True)
+        assert channels % (ratio * 2) == 0
+        self.in_proj = nn.Linear(channels, (channels + channels // ratio) // 2, bias=True)
         # balancer that goes before the sigmoid.  Have quite a large min_abs value, at 2.0,
         # because we noticed that well-trained instances of this module have abs-value before the sigmoid
         # starting from about 3, and poorly-trained instances of the module have smaller abs values
         # before the sigmoid.
         self.balancer = ActivationBalancer(
-            channels // ratio, channel_dim=-1,
+            channels // (2 * ratio), channel_dim=-1,
             min_positive=ScheduledFloat((0.0, 0.1), (8000.0, 0.05)),
             max_positive=1.0,
             min_abs=2.0,
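
To make the new in_proj size concrete, here is a quick arithmetic check; the values channels=512 and ratio=8 are hypothetical, not fixed by the commit:

channels, ratio = 512, 8  # hypothetical values for illustration

# Before this commit: in_proj emitted the full value dim plus the gate dim.
old_dim = channels + channels // ratio            # 512 + 64 = 576
# After: both the value part and the gate part are halved.
new_dim = (channels + channels // ratio) // 2     # 576 // 2 = 288
assert new_dim == channels // 2 + channels // (2 * ratio)  # 256 + 32
# The tightened assert guarantees both halves divide evenly:
assert channels % (ratio * 2) == 0
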
@@ -1481,7 +1481,7 @@ class NonlinAttentionModule(nn.Module):
         self.sigmoid = nn.Sigmoid()
         self.activation = Identity()  # for diagnostics.
-        self.out_proj = ScaledLinear(channels, channels,
+        self.out_proj = ScaledLinear(channels // 2, channels,
                                      bias=True,
                                      initial_scale=0.05)
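
Halving in_proj's output and out_proj's input roughly halves the module's projection parameters. A quick count, covering only these two projections with biases (channels=512 and ratio=8 again hypothetical):

channels, ratio = 512, 8  # hypothetical values

def linear_params(n_in, n_out):
    return n_in * n_out + n_out  # weight matrix + bias

old = (linear_params(channels, channels + channels // ratio)  # old in_proj
       + linear_params(channels, channels))                   # old out_proj
new = (linear_params(channels, (channels + channels // ratio) // 2)
       + linear_params(channels // 2, channels))
print(old, new)  # 558144 279328 -- almost exactly half
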
@@ -1508,13 +1508,13 @@ attn_weights: a Tensor of shape (num_heads, batch_size, seq_len, seq_len)
         x = self.in_proj(x)
-        s = x[..., num_channels:]
-        x = x[..., :num_channels]
+        s = x[..., num_channels // 2:]
+        x = x[..., :num_channels // 2]
         s = self.balancer(s)
         s = self.sigmoid(s)
-        s = s.unsqueeze(-1).expand(-1, -1, -1, self.ratio).reshape(seq_len, batch_size, num_channels)
+        s = s.unsqueeze(-1).expand(-1, -1, -1, self.ratio).reshape(seq_len, batch_size, num_channels // 2)
         x = x * s
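
Putting the three hunks together, this is a minimal runnable sketch of just the gating path the diff touches. It is not the full NonlinAttentionModule: the ActivationBalancer is skipped, ScaledLinear is replaced by a plain nn.Linear stand-in, and whatever forward does with attn_weights outside this hunk is omitted.

import torch
import torch.nn as nn

class GatingSketch(nn.Module):
    """Sketch of NonlinAttentionModule's gating path after this commit."""
    def __init__(self, channels: int, ratio: int):
        super().__init__()
        self.ratio = ratio
        assert channels % (ratio * 2) == 0
        # value half: channels // 2; gate half: channels // (2 * ratio)
        self.in_proj = nn.Linear(channels, (channels + channels // ratio) // 2, bias=True)
        self.sigmoid = nn.Sigmoid()
        # stand-in for ScaledLinear(channels // 2, channels, bias=True, initial_scale=0.05)
        self.out_proj = nn.Linear(channels // 2, channels, bias=True)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        (seq_len, batch_size, num_channels) = x.shape
        x = self.in_proj(x)
        s = x[..., num_channels // 2:]   # gate part, dim channels // (2 * ratio)
        x = x[..., :num_channels // 2]   # value part, dim channels // 2
        s = self.sigmoid(s)              # ActivationBalancer omitted in this sketch
        # each gate value modulates `ratio` consecutive value channels
        s = s.unsqueeze(-1).expand(-1, -1, -1, self.ratio).reshape(
            seq_len, batch_size, num_channels // 2)
        x = x * s
        return self.out_proj(x)

x = torch.randn(100, 4, 512)          # (seq_len, batch_size, channels)
print(GatingSketch(512, 8)(x).shape)  # torch.Size([100, 4, 512])
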