From 62b42887b4cf3c23ce019211833cad6acea401ae Mon Sep 17 00:00:00 2001
From: Daniel Povey
Date: Sun, 8 Jan 2023 13:17:39 +0800
Subject: [PATCH] Revert zipformer.py to status on previous commit

---
 .../ASR/pruned_transducer_stateless7/zipformer.py  | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
index 58b2e6cb2..5396b1895 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
@@ -207,15 +207,6 @@ class Zipformer(EncoderInterface):
                 attention_share_layers=attention_share_layers[i],
             )
 
-            # modify the layerdrop schedule with an extra schedule that takes longer
-            # to warm up for the less-downsampled layers; this encourages the more
-            # heavily downsampled layers to learn something.
-
-            extra_layerdrop = ScheduledFloat((0.0, 0.2), (20000.0 / downsampling_factor[i], 0.0))
-            for layer in encoder.layers:
-                # we can add objects of type ScheduledFloat.
-                layer.layer_skip_rate = layer.layer_skip_rate + extra_layerdrop
-
             if downsampling_factor[i] != 1:
                 encoder = DownsampledZipformerEncoder(
                     encoder,
@@ -229,9 +220,6 @@ class Zipformer(EncoderInterface):
                 encoder.lr_scale = downsampling_factor[i] ** -0.25
 
             encoders.append(encoder)
-
-
-
         self.encoders = nn.ModuleList(encoders)
 
         # initializes self.skip_layers and self.skip_modules
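
For context on what this revert removes: the deleted lines gave each encoder stack an extra layerdrop (layer-skip) schedule that starts at 0.2 and decays linearly to 0.0 by step 20000 / downsampling_factor[i], so less-downsampled stacks keep the extra skip probability for longer, and the result was added onto each layer's existing layer_skip_rate. Below is a minimal, hedged sketch of how such a piecewise-linear, step-dependent schedule could behave; it is not icefall's actual ScheduledFloat implementation, and the class name PiecewiseLinearSchedule and its exact interpolation rules are assumptions made purely for illustration.

```python
# Illustrative sketch only (NOT icefall's ScheduledFloat): a float-valued
# schedule defined by (step, value) breakpoints with linear interpolation
# in between and clamping outside the given range.
from bisect import bisect_right


class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # e.g. (0.0, 0.2), (20000.0, 0.0)
        self.points = sorted(points)

    def __call__(self, step: float) -> float:
        xs = [x for x, _ in self.points]
        ys = [y for _, y in self.points]
        if step <= xs[0]:
            return ys[0]
        if step >= xs[-1]:
            return ys[-1]
        i = bisect_right(xs, step) - 1
        frac = (step - xs[i]) / (xs[i + 1] - xs[i])
        return ys[i] + frac * (ys[i + 1] - ys[i])

    def __add__(self, other: "PiecewiseLinearSchedule") -> "PiecewiseLinearSchedule":
        # Adding two schedules evaluates both at the union of their breakpoints,
        # mirroring the deleted comment "we can add objects of type ScheduledFloat",
        # where the sum became each layer's combined layer_skip_rate.
        xs = sorted({x for x, _ in self.points} | {x for x, _ in other.points})
        return PiecewiseLinearSchedule(*[(x, self(x) + other(x)) for x in xs])


# The reverted code used a warm-up length inversely proportional to the
# downsampling factor: the extra skip probability decays from 0.2 to 0.0 by
# step 20000 / downsampling_factor, so heavily downsampled stacks lose it sooner.
for downsampling_factor in (1, 2, 4, 8):
    extra_layerdrop = PiecewiseLinearSchedule(
        (0.0, 0.2), (20000.0 / downsampling_factor, 0.0)
    )
    print(
        downsampling_factor,
        [round(extra_layerdrop(s), 3) for s in (0, 5000, 10000, 20000)],
    )
```

Running the loop shows the intended asymmetry: with downsampling_factor=1 the extra skip rate is still 0.1 at step 10000, while with downsampling_factor=8 it has already reached 0.0 by step 2500, which is what was meant to push the heavily downsampled stacks to learn earlier.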