Merge branch 'scaled_adam_exp896' into scaled_adam_exp904

Daniel Povey, 2023-01-16 13:18:42 +08:00
commit 80b2c751e3


@@ -489,7 +489,7 @@ class ZipformerEncoderLayer(nn.Module):
                                                dropout)
         self.nonlin_attention = NonlinAttention(embed_dim,
-                                                hidden_channels=embed_dim // 4)
+                                                hidden_channels=embed_dim // 2)
         self.small_conv_module = SmallConvolutionModule(embed_dim)
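Note: this doubles NonlinAttention's hidden width relative to the encoder dimension. A quick sketch of the effect, using a hypothetical embed_dim (the value is not from this diff):

# Illustration only; embed_dim here is hypothetical.
embed_dim = 384
old_hidden = embed_dim // 4   # 96
new_hidden = embed_dim // 2   # 192
# Combined with the in_proj change in the next hunk (2x -> 4x hidden),
# the input projection widens from 384 -> 192 to 384 -> 768 features.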
@@ -1602,7 +1602,7 @@ class NonlinAttention(nn.Module):
         self.hidden_channels = hidden_channels
-        self.in_proj = nn.Linear(channels, hidden_channels * 2, bias=True)
+        self.in_proj = nn.Linear(channels, hidden_channels * 4, bias=True)
         # balancer that goes before the sigmoid. Have quite a large min_abs value, at 2.0,
         # because we noticed that well-trained instances of this module have abs-value before the sigmoid
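The widened projection feeds the new gating branch: the 4 * hidden_channels outputs are split by the two-level chunk in forward() below into a tanh gate s, the attention input x, and a 2 * hidden_channels slice y held back for a GLU after the attention. A shape sketch with hypothetical sizes:

import torch

channels, hidden = 384, 192                 # hypothetical sizes
in_proj = torch.nn.Linear(channels, hidden * 4, bias=True)
t = in_proj(torch.randn(10, 2, channels))   # (seq, batch, 4 * hidden)
xy, y = t.chunk(2, dim=-1)                  # each (10, 2, 2 * hidden)
s, x = xy.chunk(2, dim=-1)                  # each (10, 2, hidden)
assert y.shape[-1] == 2 * hidden and x.shape[-1] == hidden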
@@ -1617,7 +1617,10 @@ class NonlinAttention(nn.Module):
         )
         self.tanh = nn.Tanh()
-        self.activation = Identity() # for diagnostics.
+        self.identity1 = Identity() # for diagnostics.
+        self.identity2 = Identity() # for diagnostics.
+        self.identity3 = Identity() # for diagnostics.
         self.out_proj = ScaledLinear(hidden_channels, channels,
                                      bias=True,
                                      initial_scale=0.05)
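The single activation tap becomes three, so activation statistics can be collected separately after the tanh gating, after the GLU, and after the final product. Identity is just a pass-through that gives the diagnostics hooks a named submodule to attach to; a minimal stand-in for illustration:

import torch

class Identity(torch.nn.Module):
    # Pure pass-through: exists so diagnostics can register forward hooks
    # on a named submodule and log tensor statistics at this point.
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x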
@@ -1652,16 +1655,19 @@ attn_weights: a Tensor of shape (num_heads, batch_size, seq_len, seq_len)
         (seq_len, batch_size, _) = x.shape
         hidden_channels = self.hidden_channels
-        s = x[..., hidden_channels:]
-        x = x[..., :hidden_channels]
+        x, y = x.chunk(2, dim=-1)
+        s, x = x.chunk(2, dim=-1)
         # s will go through tanh.
         s = self.balancer(s)
         s = self.tanh(s)
         s = s.unsqueeze(-1).reshape(seq_len, batch_size, hidden_channels)
-        x = self.whiten1(x)
-        x = self.activation(x) # diagnostics only, it's the identity.
         x = x * s
+        x = self.identity1(x) # diagnostics only, it's the identity.
         (seq_len, batch_size, embed_dim) = x.shape
         num_heads = attn_weights.shape[0]
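Besides the new split, the whiten1/activation pair is dropped: the tanh gate multiplies x directly and identity1 taps the product. The code that follows (per the "# now x:" comment in the next hunk) mixes x across time with the shared attention weights; a walk-through with hypothetical sizes, where the per-head reshape is inferred from the surrounding context lines:

import torch

seq_len, batch, hidden, num_heads = 10, 2, 192, 4    # hypothetical
s = torch.randn(seq_len, batch, hidden)
x = torch.randn(seq_len, batch, hidden)
x = x * torch.tanh(s)                  # gate applied before attention mixing
head_dim = hidden // num_heads
x = x.reshape(seq_len, batch, num_heads, head_dim).permute(2, 1, 0, 3)
attn_weights = torch.softmax(
    torch.randn(num_heads, batch, seq_len, seq_len), dim=-1)
x = torch.matmul(attn_weights, x)      # (num_heads, batch, seq_len, head_dim)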
@@ -1673,6 +1679,11 @@ attn_weights: a Tensor of shape (num_heads, batch_size, seq_len, seq_len)
         # now x: (num_heads, batch_size, seq_len, head_dim)
         x = x.permute(2, 1, 0, 3).reshape(seq_len, batch_size, -1)
+        y = torch.nn.functional.glu(y, dim=-1)
+        y = self.identity2(y)
+        x = x * y
+        x = self.identity3(x)
         x = self.out_proj(x)
         x = self.whiten2(x)
         return x
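Putting the hunks together, a self-contained sketch of the module's data flow after this commit. The balancer, whitening, diagnostic taps, and ScaledLinear's initial_scale are omitted, and the per-head reshape between hunks is inferred from context, so treat this as an approximation of the module rather than the exact code:

import torch
import torch.nn as nn

class NonlinAttentionSketch(nn.Module):
    def __init__(self, channels: int, hidden_channels: int):
        super().__init__()
        # 4 * hidden outputs: halves (s, x) plus a 2 * hidden slice y for the GLU.
        self.in_proj = nn.Linear(channels, hidden_channels * 4, bias=True)
        self.out_proj = nn.Linear(hidden_channels, channels, bias=True)

    def forward(self, x: torch.Tensor, attn_weights: torch.Tensor) -> torch.Tensor:
        # x: (seq_len, batch_size, channels)
        # attn_weights: (num_heads, batch_size, seq_len, seq_len)
        x = self.in_proj(x)
        x, y = x.chunk(2, dim=-1)      # each (seq, batch, 2 * hidden)
        s, x = x.chunk(2, dim=-1)      # each (seq, batch, hidden)
        x = x * torch.tanh(s)          # tanh gate before attention

        (seq_len, batch_size, embed_dim) = x.shape
        num_heads = attn_weights.shape[0]
        x = x.reshape(seq_len, batch_size, num_heads, -1).permute(2, 1, 0, 3)
        x = torch.matmul(attn_weights, x)   # mix across time, per head
        x = x.permute(2, 1, 0, 3).reshape(seq_len, batch_size, -1)

        y = torch.nn.functional.glu(y, dim=-1)   # 2 * hidden -> hidden
        x = x * y                                # GLU gate after attention
        return self.out_proj(x)

# Quick shape check with hypothetical sizes:
m = NonlinAttentionSketch(channels=384, hidden_channels=192)
w = torch.softmax(torch.randn(4, 2, 10, 10), dim=-1)
assert m(torch.randn(10, 2, 384), w).shape == (10, 2, 384)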