From b975bdef9e5253b56782de3d344b27ee0d269d83 Mon Sep 17 00:00:00 2001
From: Fangjun Kuang
Date: Thu, 14 Oct 2021 21:16:59 +0800
Subject: [PATCH] Disable decoder layers in pretrained.py if it is not used.

---
 egs/librispeech/ASR/conformer_ctc/pretrained.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/conformer_ctc/pretrained.py b/egs/librispeech/ASR/conformer_ctc/pretrained.py
index be94e6875..c6de743ea 100755
--- a/egs/librispeech/ASR/conformer_ctc/pretrained.py
+++ b/egs/librispeech/ASR/conformer_ctc/pretrained.py
@@ -226,7 +226,13 @@ def main():
     args = parser.parse_args()
 
     params = get_params()
+    if args.method != "attention-decoder":
+        # to save memory as the attention decoder
+        # will not be used
+        params.num_decoder_layers = 0
+
     params.update(vars(args))
+
     logging.info(f"{params}")
 
     device = torch.device("cpu")
@@ -248,7 +254,7 @@ def main():
     )
 
     checkpoint = torch.load(args.checkpoint, map_location="cpu")
-    model.load_state_dict(checkpoint["model"])
+    model.load_state_dict(checkpoint["model"], strict=False)
     model.to(device)
     model.eval()