Fix bug setting layerdrop mask

This commit is contained in:
Daniel Povey 2022-10-05 16:19:45 +08:00
parent 61f62837fa
commit 1cd7e93183

View File

@@ -362,7 +362,7 @@ class ConformerEncoder(nn.Module):
     mask[-final_layers_dropped:] = 0.0
     layer_drop_prob = 0.075
-    for i in range(final_layers_dropped):
+    for i in range(num_layers - final_layers_dropped):
         mask[i] = (rng.random() > layer_drop_prob)
     if mask.sum() == 0.0:
         mask[0] = 1.0