Remove use of RandomGrad

Daniel Povey 2022-10-20 19:35:11 +08:00
parent 1018a77410
commit dccff6b893


@@ -36,8 +36,6 @@ from scaling import (
     _diag,
     random_clamp,
     with_loss,
-    softmax,
-    RandomGrad,
 )
 from torch import Tensor, nn
@@ -306,7 +304,6 @@ class ConformerEncoderLayer(nn.Module):
                                whitening_limit=5.0,
                                prob=(0.025, 0.25),
                                grad_scale=0.01)
-        self.random_grad = RandomGrad()

     def forward(
         self,
@@ -366,7 +363,7 @@
         bypass_scale = bypass_scale.clamp(min=0.1, max=1.0)
         src = src_orig + delta * self.bypass_scale
-        return self.random_grad(self.whiten(src))
+        return self.whiten(src)


 class ConformerEncoder(nn.Module):
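
For context, below is a minimal sketch of what a RandomGrad-style wrapper does, assuming the semantics the surrounding code suggests: an identity in the forward pass whose backward pass randomly rounds very small float16 gradients so that their expected value is preserved (in the spirit of the random_clamp helper that remains imported above). The class bodies, the min_abs default, and the exact rounding rule are illustrative assumptions, not the verbatim removed code.

import torch
from torch import Tensor, nn


class RandomGradFunction(torch.autograd.Function):
    # Identity in the forward pass.  In the backward pass, float16 gradients
    # with magnitude below `min_abs` are randomly rounded to 0 or +-min_abs
    # so that their expected value is unchanged (a guard against float16
    # underflow).  Sketch only; the removed class may differ in detail.
    @staticmethod
    def forward(ctx, x: Tensor, min_abs: float) -> Tensor:
        ctx.min_abs = min_abs
        return x

    @staticmethod
    def backward(ctx, ans_grad: Tensor):
        if ans_grad.dtype != torch.float16:
            return ans_grad, None
        g = ans_grad.to(torch.float32)
        min_abs = ctx.min_abs
        small = g.abs() < min_abs
        # Round up to +-min_abs with probability |g| / min_abs, else down to
        # 0; E[result] == g, so the gradient stays unbiased.
        keep = torch.rand_like(g) * min_abs < g.abs()
        rounded = torch.where(keep, g.sign() * min_abs, torch.zeros_like(g))
        return torch.where(small, rounded, g).to(torch.float16), None


class RandomGrad(nn.Module):
    # Thin module wrapper; a no-op outside training (min_abs default is an
    # assumption for illustration).
    def __init__(self, min_abs: float = 5.0e-06):
        super().__init__()
        self.min_abs = min_abs

    def forward(self, x: Tensor) -> Tensor:
        if not self.training:
            return x
        return RandomGradFunction.apply(x, self.min_abs)

After this commit the encoder layer returns self.whiten(src) directly, so no randomized transformation is applied to the gradient in the backward pass.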