Replace norm in ConvolutionModule with a scaling factor.

Daniel Povey 2022-03-10 16:01:53 +08:00
parent 87b843f023
commit 425e274c82
2 changed files with 4 additions and 3 deletions


@@ -857,7 +857,8 @@ class ConvolutionModule(nn.Module):
             bias=bias,
         )
-        self.norm = nn.LayerNorm(channels)
+        self.scale = ExpScale(1, speed=10.0, initial_scale=1.0)
+        # a single learnable scalar scale; broadcasts with (batch, time, channels)
         self.activation = SwishOffset()
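
For context, ExpScale is defined elsewhere in this branch; the call above suggests it takes the parameter shape followed by speed and initial_scale keywords. A minimal sketch of such a module, assuming a log-domain parameterization in which speed rescales the gradient on the exponent (not necessarily the repo's exact definition):

import torch
import torch.nn as nn

class ExpScale(nn.Module):
    """Learnable elementwise scale, parameterized in the log domain so the
    scale stays positive; `speed` amplifies the gradient on the exponent."""

    def __init__(self, *shape, speed: float = 1.0, initial_scale: float = 1.0):
        super().__init__()
        # Store log(initial_scale) / speed so that
        # (scale * speed).exp() == initial_scale at initialization.
        initial = torch.tensor(initial_scale).log() / speed
        self.scale = nn.Parameter(initial * torch.ones(*shape))
        self.speed = speed

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x * (self.scale * self.speed).exp()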
@@ -891,7 +892,7 @@ class ConvolutionModule(nn.Module):
         x = self.depthwise_conv(x)
         # x is (batch, channels, time)
         x = x.permute(0, 2, 1)
-        x = self.norm(x)
+        x = self.scale(x)
         x = x.permute(0, 2, 1)
         x = self.activation(x)
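
Because the learned scale is elementwise and broadcastable, the permutes around self.scale survive only from the layout the old LayerNorm required; a scalar ExpScale(1, ...) applies identically in either layout. A quick shape check under the sketch above (hypothetical sizes):

m = ExpScale(1, speed=10.0, initial_scale=1.0)
x = torch.randn(4, 100, 256)  # (batch, time, channels)
y = m(x)                      # every element multiplied by exp(scale * speed)
assert y.shape == x.shape     # a scalar scale broadcasts over all dims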


@@ -110,7 +110,7 @@ def get_parser():
     parser.add_argument(
         "--exp-dir",
         type=str,
-        default="transducer_stateless/specaugmod_baseline_randcombine1_expscale3_brelu2swish2_0.1_bnorm",
+        default="transducer_stateless/specaugmod_baseline_randcombine1_expscale3_brelu2swish2_0.1_bnorm2",
         help="""The experiment dir.
         It specifies the directory where all training related
         files, e.g., checkpoints, log, etc, are saved