From 5de9d0a19a77745d6206cec9ad5d24ff40c95c94 Mon Sep 17 00:00:00 2001
From: yaozengwei
Date: Tue, 10 May 2022 11:48:22 +0800
Subject: [PATCH] add tanh_on_mem option

---
 .../ASR/emformer_pruned_transducer_stateless/train.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/egs/librispeech/ASR/emformer_pruned_transducer_stateless/train.py b/egs/librispeech/ASR/emformer_pruned_transducer_stateless/train.py
index e187a08e7..18a845a93 100755
--- a/egs/librispeech/ASR/emformer_pruned_transducer_stateless/train.py
+++ b/egs/librispeech/ASR/emformer_pruned_transducer_stateless/train.py
@@ -130,6 +130,13 @@ def add_model_arguments(parser: argparse.ArgumentParser):
         help="Number of entries in the memory for the Emformer",
     )
 
+    parser.add_argument(
+        "--tanh-on-mem",
+        type=str2bool,
+        default=False,
+        help="Whether to apply tanh on memory",
+    )
+
 
 def get_parser():
     parser = argparse.ArgumentParser(
@@ -369,6 +376,7 @@ def get_encoder_model(params: AttributeDict) -> nn.Module:
         chunk_length=params.chunk_length,
         right_context_length=params.right_context_length,
         max_memory_size=params.memory_size,
+        tanh_on_mem=params.tanh_on_mem,
     )
     return encoder
 
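
Notes (not part of the patch): the new flag is parsed in add_model_arguments() and forwarded to the recipe's Emformer encoder via get_encoder_model(). In Emformer-style attention, a tanh_on_mem switch typically controls whether the per-layer memory vectors are squashed with tanh (bounding them to (-1, 1) and keeping their scale stable across chunks) or merely clipped. The sketch below illustrates that idea only; the clamp range of 10.0 and the function name are assumptions for illustration, not taken from this patch or guaranteed to match the underlying Emformer implementation.

    import torch

    def postprocess_memory(output_mems: torch.Tensor, tanh_on_mem: bool) -> torch.Tensor:
        """Sketch: bound the memory vectors emitted by an Emformer-style layer.

        With tanh_on_mem=True the memory is squashed into (-1, 1); otherwise
        it is only clipped. The clamp bounds here are assumed values.
        """
        if tanh_on_mem:
            return torch.tanh(output_mems)
        return torch.clamp(output_mems, min=-10.0, max=10.0)

With this patch applied, the behavior can be toggled from the command line, e.g. passing --tanh-on-mem True (or False, the default) to emformer_pruned_transducer_stateless/train.py alongside the other training arguments.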