Change max_factor in DerivBalancer from 0.025 to 0.01; fix scaling code.
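
For orientation, a rough sketch of what a DerivBalancer with threshold and max_factor plausibly does, inferred only from the call sites in this diff; the class name, signature, and exact backward rule below are assumptions for illustration, not the actual icefall implementation. Under this reading, lowering max_factor from 0.025 to 0.01 makes the backward-pass correction gentler.

    import torch

    class DerivBalancerSketch(torch.autograd.Function):
        # Identity in the forward pass; the work happens in backward.
        @staticmethod
        def forward(ctx, x, channel_dim, threshold, max_factor):
            if channel_dim < 0:
                channel_dim += x.ndim
            mean_dims = [d for d in range(x.ndim) if d != channel_dim]
            # Per-channel fraction of positive activations.
            proportion_positive = torch.mean(
                (x > 0).to(x.dtype), dim=mean_dims, keepdim=True
            )
            # Zero for channels at or above the threshold; approaches
            # max_factor as the positive fraction approaches zero.
            factor = (threshold - proportion_positive).clamp(min=0) * (
                max_factor / threshold
            )
            ctx.save_for_backward(factor)
            return x

        @staticmethod
        def backward(ctx, grad_out):
            (factor,) = ctx.saved_tensors
            # Making the gradient slightly more negative pushes the
            # activations upward under gradient descent, i.e. toward a
            # larger positive fraction in the flagged channels.
            return grad_out - grad_out.abs() * factor, None, None, None

    # Usage: y = DerivBalancerSketch.apply(x, 1, 0.05, 0.01)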

Daniel Povey 2022-03-11 14:47:46 +08:00
parent 2940d3106f
commit bcf417fce2
2 changed files with 5 additions and 5 deletions


@@ -48,13 +48,13 @@ class Conv2dSubsampling(nn.Module):
                 in_channels=1, out_channels=odim, kernel_size=3, stride=2
             ),
             DerivBalancer(channel_dim=1, threshold=0.05,
-                          max_factor=0.025),
+                          max_factor=0.01),
             ExpScaleRelu(odim, 1, 1, speed=20.0),
             nn.Conv2d(
                 in_channels=odim, out_channels=odim, kernel_size=3, stride=2
             ),
             DerivBalancer(channel_dim=1, threshold=0.05,
-                          max_factor=0.025),
+                          max_factor=0.01),
             ExpScaleRelu(odim, 1, 1, speed=20.0),
         )
         self.out = nn.Linear(odim * (((idim - 1) // 2 - 1) // 2), odim)


@@ -159,7 +159,7 @@ class ConformerEncoderLayer(nn.Module):
         self.feed_forward = nn.Sequential(
             nn.Linear(d_model, dim_feedforward),
             DerivBalancer(channel_dim=-1, threshold=0.05,
-                          max_factor=0.025),
+                          max_factor=0.01),
             SwishExpScale(dim_feedforward, speed=20.0),
             nn.Dropout(dropout),
             nn.Linear(dim_feedforward, d_model),
@@ -168,7 +168,7 @@ class ConformerEncoderLayer(nn.Module):
         self.feed_forward_macaron = nn.Sequential(
             nn.Linear(d_model, dim_feedforward),
             DerivBalancer(channel_dim=-1, threshold=0.05,
-                          max_factor=0.025),
+                          max_factor=0.01),
             SwishExpScale(dim_feedforward, speed=20.0),
             nn.Dropout(dropout),
             nn.Linear(dim_feedforward, d_model),
@@ -720,7 +720,7 @@ class RelPositionMultiheadAttention(nn.Module):
             )
         key_padding_mask = key_padding_mask.to(torch.bool)
-        q = (q.contiguous() * scaling).view(tgt_len, bsz, num_heads, head_dim)
+        q = (q * scaling).contiguous().view(tgt_len, bsz, num_heads, head_dim)
         k = k.contiguous().view(-1, bsz, num_heads, head_dim)
         v = v.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)
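
On the last hunk: as written, the two lines compute the same values, since an elementwise multiply returns a fresh contiguous tensor; the new form just scales q before making it contiguous for the view. A standalone sanity check with made-up shapes (a sketch, not code from the repo):

    import torch

    # Illustrative shapes; the transpose makes q non-contiguous, as it
    # can be at this point in the attention code.
    tgt_len, bsz, num_heads, head_dim = 5, 3, 4, 8
    scaling = float(head_dim) ** -0.5
    q = torch.randn(bsz, tgt_len, num_heads * head_dim).transpose(0, 1)

    old = (q.contiguous() * scaling).view(tgt_len, bsz, num_heads, head_dim)
    new = (q * scaling).contiguous().view(tgt_len, bsz, num_heads, head_dim)
    # Identical values; the multiply already returns a contiguous
    # tensor, so the trailing .contiguous() in the new form is a no-op.
    assert torch.equal(old, new)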