Change sigmoid to tanh in NonlinAttentionModule, and adjust abs limits of balancer to compensate.

Daniel Povey 2022-11-30 21:44:45 +08:00
parent 2969eb5467
commit ba31272c92


@@ -1464,10 +1464,10 @@ class NonlinAttentionModule(nn.Module):
             channels // (2 * ratio), channel_dim=-1,
             min_positive=ScheduledFloat((0.0, 0.1), (8000.0, 0.05)),
             max_positive=1.0,
-            min_abs=1.5,
-            max_abs=ScheduledFloat((0.0, 5.0), (8000.0, 10.0), default=1.0),
+            min_abs=0.75,
+            max_abs=ScheduledFloat((0.0, 2.5), (8000.0, 5.0), default=1.0),
         )
-        self.sigmoid = nn.Sigmoid()
+        self.tanh = nn.Tanh()
         self.activation = Identity()  # for diagnostics.
         self.out_proj = ScaledLinear(channels // 2, channels,
@@ -1501,7 +1501,7 @@ attn_weights: a Tensor of shape (num_heads, batch_size, seq_len, seq_len)
         x = x[..., :num_channels // 2]
         s = self.balancer(s)
-        s = self.sigmoid(s)
+        s = self.tanh(s)
         s = s.unsqueeze(-1).expand(-1, -1, -1, self.ratio).reshape(seq_len, batch_size, num_channels // 2)
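
Why halving the abs limits compensates: tanh(x) = 2 * sigmoid(2x) - 1, so tanh reaches the same degree of saturation at roughly half the input magnitude that sigmoid needs. That presumably motivates halving the balancer constraints (min_abs 1.5 -> 0.75, max_abs schedule 5.0/10.0 -> 2.5/5.0) alongside the activation swap. A minimal sketch verifying the identity (PyTorch assumed; not part of the commit):

import torch

# tanh(x) == 2 * sigmoid(2x) - 1 everywhere, so matching pre-activation
# behavior after switching sigmoid -> tanh calls for inputs of about half
# the magnitude -- hence the halved min_abs/max_abs limits above.
x = torch.linspace(-4.0, 4.0, steps=9)
assert torch.allclose(torch.tanh(x), 2.0 * torch.sigmoid(2.0 * x) - 1.0, atol=1e-6)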