Mirror of https://github.com/k2-fsa/icefall.git
Fix test mode with random layer dropout
commit 0e694739f2
parent d2ed3dfc90
@@ -231,7 +231,8 @@ class ConformerEncoderLayer(nn.Module):
         # period we sometimes use scale 1.0; this ensures that the modules do not
         # compensate for the small scale by just producing larger output.
         warmup = max(warmup, 0.1)
-        warmup = min(warmup, 0.95)  # effectively, layer-drop.
+        if self.training:
+            warmup = min(warmup, 0.95)  # effectively, layer-drop.
         alpha = 1.0 if torch.rand(()).item() <= warmup else 0.1
 
         # macaron style feed forward module
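Below is a minimal, self-contained sketch of why the "if self.training:" guard matters. It is not the actual ConformerEncoderLayer: the ToyLayer class, its single Linear sublayer, and the alpha-blended residual at the end are illustrative assumptions; only the warmup clamping and the alpha draw mirror the lines in the diff above.

import torch
import torch.nn as nn


class ToyLayer(nn.Module):
    # Hypothetical stand-in for an encoder layer with warmup-based layer dropout.
    def __init__(self, dim: int = 4):
        super().__init__()
        self.linear = nn.Linear(dim, dim)

    def forward(self, x: torch.Tensor, warmup: float = 1.0) -> torch.Tensor:
        warmup = max(warmup, 0.1)
        if self.training:
            # Only clamp below 1.0 while training, so a fully warmed-up layer
            # is still dropped (alpha = 0.1) about 5% of the time.  Without
            # this guard, the same random drop also happened at test time.
            warmup = min(warmup, 0.95)  # effectively, layer-drop.
        alpha = 1.0 if torch.rand(()).item() <= warmup else 0.1
        # Illustrative residual blend (an assumption, not copied from icefall):
        # mix the sublayer output with the original input using alpha.
        return alpha * self.linear(x) + (1.0 - alpha) * x


layer = ToyLayer()
layer.eval()  # self.training is now False, so warmup stays at 1.0
x = torch.randn(2, 4)
with torch.no_grad():
    # After the fix, eval-mode outputs are deterministic: alpha is always 1.0.
    assert torch.allclose(layer(x, warmup=1.0), layer(x, warmup=1.0))

Keeping alpha pinned at 1.0 outside training makes evaluation and decoding reproducible, which is what the commit title means by fixing test mode.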