From 3468c3aa5ae2e48f28af9a66171e6919b9cae7d8 Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Mon, 11 Jul 2022 14:12:24 -0700
Subject: [PATCH] Remove ActivationBalancer, unnecessary

---
 egs/librispeech/ASR/pruned_transducer_stateless7/optim.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py b/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py
index 5b960e30e..1d87f3f73 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/optim.py
@@ -1664,10 +1664,8 @@ def _test_eve_cain():
         Linear = torch.nn.Linear if iter == 0 else ScaledLinear
         # TODO: find out why this is not converging...
         m = torch.nn.Sequential(Linear(E, 200),
-                                ActivationBalancer(-1),
                                 torch.nn.PReLU(),
                                 Linear(200, 200),
-                                ActivationBalancer(-1),
                                 torch.nn.PReLU(),
                                 Linear(200, E),
                                 ).to(device)