From 3fe4578b2fecdf6e36e2fa7a74fe81361b6b1f57 Mon Sep 17 00:00:00 2001
From: Fangjun Kuang
Date: Thu, 21 Sep 2023 12:14:26 +0800
Subject: [PATCH] Use torch.jit.script() to export the decoder model

See also https://github.com/k2-fsa/sherpa-onnx/issues/327
---
 .../ASR/pruned_transducer_stateless7_streaming/export-onnx.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless7_streaming/export-onnx.py b/egs/librispeech/ASR/pruned_transducer_stateless7_streaming/export-onnx.py
index 6f84d79b4..d71080760 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless7_streaming/export-onnx.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless7_streaming/export-onnx.py
@@ -401,6 +401,7 @@ def export_decoder_model_onnx(
     context_size = decoder_model.decoder.context_size
     vocab_size = decoder_model.decoder.vocab_size
     y = torch.zeros(10, context_size, dtype=torch.int64)
+    decoder_model = torch.jit.script(decoder_model)
     torch.onnx.export(
         decoder_model,
         y,
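
For context, the one-line change above wraps the decoder with torch.jit.script()
before handing it to torch.onnx.export(), so the exporter works from TorchScript
rather than a single traced execution path. Below is a minimal, self-contained
sketch of that pattern; the ToyDecoder module, output file name, and opset/axis
settings are made-up placeholders for illustration, not icefall's actual decoder
or export arguments.

    import torch
    import torch.nn as nn


    class ToyDecoder(nn.Module):
        # Hypothetical stand-in for the real decoder; only the export
        # pattern matters here, not the model internals.
        def __init__(self, vocab_size: int = 500, context_size: int = 2,
                     embed_dim: int = 8):
            super().__init__()
            self.context_size = context_size
            self.vocab_size = vocab_size
            self.embedding = nn.Embedding(vocab_size, embed_dim)

        def forward(self, y: torch.Tensor) -> torch.Tensor:
            # (N, context_size) -> (N, context_size * embed_dim)
            return self.embedding(y).reshape(y.size(0), -1)


    decoder_model = ToyDecoder()
    y = torch.zeros(10, decoder_model.context_size, dtype=torch.int64)

    # Script the module first, as in the patch, so the ONNX exporter sees
    # the TorchScript graph instead of tracing one concrete run.
    decoder_model = torch.jit.script(decoder_model)

    torch.onnx.export(
        decoder_model,
        y,
        "decoder.onnx",
        opset_version=13,
        input_names=["y"],
        output_names=["decoder_out"],
        dynamic_axes={"y": {0: "N"}, "decoder_out": {0: "N"}},
    )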