Merge branch 'k2-fsa:master' into dev_swbd

zr_jin authored on 2023-09-22 01:25:20 +08:00, committed by GitHub
commit 3ae86f1431
24 changed files with 75 additions and 8 deletions

View File

@ -338,7 +338,7 @@ We provide one model for this recipe: [Pruned stateless RNN-T: Conformer encoder
#### Pruned stateless RNN-T: Conformer encoder + Embedding decoder + k2 pruned RNN-T loss
-The best results for Chinese CER(%) and English WER(%) respectivly (zh: Chinese, en: English):
+The best results for Chinese CER(%) and English WER(%) respectively (zh: Chinese, en: English):
|decoding-method | dev | dev_zh | dev_en | test | test_zh | test_en |
|--|--|--|--|--|--|--|
|greedy_search| 7.30 | 6.48 | 19.19 |7.39| 6.66 | 19.13|

View File

@ -151,12 +151,14 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_decoder(self, decoder_model_filename: str):
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -170,6 +172,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map
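Most hunks in this commit make the same change: they pass providers=["CPUExecutionProvider"] explicitly when constructing an ort.InferenceSession. Newer onnxruntime builds warn, and in some configurations raise, when no execution providers are listed, which is presumably why every session in these recipes now names the CPU provider explicitly. A minimal sketch of the pattern, with "model.onnx" as a placeholder for the exported encoder/decoder/joiner files:

import onnxruntime as ort

session_opts = ort.SessionOptions()
session_opts.intra_op_num_threads = 1  # illustrative setting, not taken from the patch

session = ort.InferenceSession(
    "model.onnx",  # placeholder path
    sess_options=session_opts,
    providers=["CPUExecutionProvider"],  # listed explicitly to satisfy newer onnxruntime
)
meta = session.get_modelmeta().custom_metadata_map  # same metadata lookup the recipes use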

View File

@ -152,12 +152,14 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_decoder(self, decoder_model_filename: str):
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -171,6 +173,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -79,7 +79,7 @@ if [ $stage -le 0 ] && [ $stop_stage -ge 0 ]; then
# ln -sfv /path/to/rirs_noises $dl_dir/
#
if [ ! -d $dl_dir/rirs_noises ]; then
-lhotse download rirs_noises $dl_dir
+lhotse download rir-noise $dl_dir/rirs_noises
fi
fi
@ -89,6 +89,7 @@ if [ $stage -le 1 ] && [ $stop_stage -ge 1 ]; then
# to $dl_dir/librispeech. We perform text normalization for the transcripts.
# NOTE: Alignments are required for this recipe.
mkdir -p data/manifests
lhotse prepare librispeech -p train-clean-100 -p train-clean-360 -p train-other-500 -p dev-clean \
-j 4 --alignments-dir $dl_dir/libri_alignments/LibriSpeech $dl_dir/librispeech data/manifests/
fi
@ -112,7 +113,7 @@ if [ $stage -le 3 ] && [ $stop_stage -ge 3 ]; then
# We assume that you have downloaded the RIRS_NOISES corpus
# to $dl_dir/rirs_noises
-lhotse prepare rir-noise -p real_rir -p iso_noise $dl_dir/rirs_noises data/manifests
+lhotse prepare rir-noise -p real_rir -p iso_noise $dl_dir/rirs_noises/RIRS_NOISES data/manifests
fi
if [ $stage -le 4 ] && [ $stop_stage -ge 4 ]; then

View File

@ -136,6 +136,7 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
self.init_encoder_states()
@ -184,6 +185,7 @@ class OnnxModel:
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -197,6 +199,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -129,6 +129,7 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
self.init_encoder_states()
@ -166,6 +167,7 @@ class OnnxModel:
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -179,6 +181,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -172,30 +172,35 @@ class Model:
self.encoder = ort.InferenceSession(
args.encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_decoder(self, args):
self.decoder = ort.InferenceSession(
args.decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_joiner(self, args):
self.joiner = ort.InferenceSession(
args.joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_joiner_encoder_proj(self, args):
self.joiner_encoder_proj = ort.InferenceSession(
args.joiner_encoder_proj_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_joiner_decoder_proj(self, args):
self.joiner_decoder_proj = ort.InferenceSession(
args.joiner_decoder_proj_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def run_encoder(self, x, h0, c0) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:

View File

@ -150,12 +150,14 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_decoder(self, decoder_model_filename: str):
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -169,6 +171,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -78,6 +78,7 @@ def test_conv2d_subsampling():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -133,6 +134,7 @@ def test_rel_pos():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -220,6 +222,7 @@ def test_conformer_encoder_layer():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -304,6 +307,7 @@ def test_conformer_encoder():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -359,6 +363,7 @@ def test_conformer():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()

View File

@ -138,6 +138,7 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
self.init_encoder_states()
@ -185,6 +186,7 @@ class OnnxModel:
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -198,6 +200,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -71,6 +71,10 @@ class Decoder(nn.Module):
groups=decoder_dim // 4, # group size == 4
bias=False,
)
else:
# To avoid `RuntimeError: Module 'Decoder' has no attribute 'conv'`
# when running inference with torch.jit.script and context_size == 1
self.conv = nn.Identity()
def forward(self, y: torch.Tensor, need_pad: bool = True) -> torch.Tensor:
"""

View File

@ -74,6 +74,7 @@ def test_conv2d_subsampling():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -128,6 +129,7 @@ def test_rel_pos():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -204,6 +206,7 @@ def test_zipformer_encoder_layer():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -284,6 +287,7 @@ def test_zipformer_encoder():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()
@ -338,6 +342,7 @@ def test_zipformer():
session = ort.InferenceSession(
filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
input_nodes = session.get_inputs()

View File

@ -326,41 +326,49 @@ def main():
encoder = ort.InferenceSession(
args.encoder_model_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
decoder = ort.InferenceSession(
args.decoder_model_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
joiner = ort.InferenceSession(
args.joiner_model_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
joiner_encoder_proj = ort.InferenceSession(
args.joiner_encoder_proj_model_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
joiner_decoder_proj = ort.InferenceSession(
args.joiner_decoder_proj_model_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
lconv = ort.InferenceSession(
args.lconv_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
frame_reducer = ort.InferenceSession(
args.frame_reducer_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
ctc_output = ort.InferenceSession(
args.ctc_output_filename,
sess_options=session_opts,
providers=["CPUExecutionProvider"],
)
sp = spm.SentencePieceProcessor()

View File

@ -130,6 +130,7 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
self.init_encoder_states()
@ -229,6 +230,7 @@ class OnnxModel:
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -242,6 +244,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -865,7 +865,7 @@ class ZipformerEncoderLayer(nn.Module):
return final_dropout_rate
else:
return initial_dropout_rate - (
-initial_dropout_rate * final_dropout_rate
+initial_dropout_rate - final_dropout_rate
) * (self.batch_count / warmup_period)
def forward(
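The corrected expression anneals the dropout rate linearly from initial_dropout_rate down to final_dropout_rate over the warmup period; the old code multiplied the two rates, so the schedule never reached the final value. A standalone illustration with made-up rates and warmup length:

def scheduled_dropout(batch_count: float,
                      initial_dropout_rate: float = 0.3,
                      final_dropout_rate: float = 0.1,
                      warmup_period: float = 4000.0) -> float:
    """Linear interpolation from the initial to the final dropout rate."""
    if batch_count >= warmup_period:
        return final_dropout_rate
    return initial_dropout_rate - (
        initial_dropout_rate - final_dropout_rate
    ) * (batch_count / warmup_period)

assert scheduled_dropout(0) == 0.3                 # starts at the initial rate
assert abs(scheduled_dropout(2000) - 0.2) < 1e-9   # halfway through the warmup
assert scheduled_dropout(4000) == 0.1              # settles at the final rate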

View File

@ -230,7 +230,7 @@ class Conformer(Transformer):
x, pos_emb, mask=mask, src_key_padding_mask=src_key_padding_mask
) # (T, B, F)
else:
-x = self.encoder(x, pos_emb, src_key_padding_mask=mask) # (T, B, F)
+x = self.encoder(x, pos_emb, src_key_padding_mask=src_key_padding_mask) # (T, B, F)
if self.normalize_before:
x = self.after_norm(x)
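This hunk fixes an argument mix-up: mask is the attention mask over time steps, while src_key_padding_mask marks padded frames per utterance, and the encoder was previously given the former in place of the latter. A small, hypothetical illustration of how a key padding mask of shape (batch, time) is typically built from utterance lengths (True at padded positions):

import torch

lengths = torch.tensor([95, 80, 60])  # frames per utterance (made-up values)
max_len = int(lengths.max())
# Row i is True wherever utterance i is padding; shape (3, 95).
src_key_padding_mask = torch.arange(max_len).unsqueeze(0) >= lengths.unsqueeze(1)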

View File

@ -146,6 +146,7 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
self.init_encoder_states()
@ -236,6 +237,7 @@ class OnnxModel:
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -249,6 +251,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -151,12 +151,14 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_decoder(self, decoder_model_filename: str):
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -170,6 +172,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -49,7 +49,7 @@ if [ $stage -le 2 ] && [ $stop_stage -ge 2 ]; then
log "Stage 2: Prepare THCHS-30"
if [ ! -d $dl_dir/thchs30 ]; then
log "Downloading THCHS-30"
-lhotse download thchs30 $dl_dir/thchs30
+lhotse download thchs-30 $dl_dir/thchs30
fi
if [ ! -f data/manifests/.thchs30.done ]; then

View File

@ -724,12 +724,12 @@ def main():
)
save_results(
params=params,
-test_set_name=test_set,
+test_set_name=test_set + "-zh",
results_dict=zh_results_dict,
)
save_results(
params=params,
-test_set_name=test_set,
+test_set_name=test_set + "-en",
results_dict=en_results_dict,
)

View File

@ -258,6 +258,7 @@ def main():
encoder_session = ort.InferenceSession(
args.onnx_encoder_filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
test_encoder(model, encoder_session)
@ -265,6 +266,7 @@ def main():
decoder_session = ort.InferenceSession(
args.onnx_decoder_filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
test_decoder(model, decoder_session)
@ -272,14 +274,17 @@ def main():
joiner_session = ort.InferenceSession(
args.onnx_joiner_filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
joiner_encoder_proj_session = ort.InferenceSession(
args.onnx_joiner_encoder_proj_filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
joiner_decoder_proj_session = ort.InferenceSession(
args.onnx_joiner_decoder_proj_filename,
sess_options=options,
providers=["CPUExecutionProvider"],
)
test_joiner(
model,

View File

@ -139,6 +139,7 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
self.init_encoder_states()
@ -186,6 +187,7 @@ class OnnxModel:
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -199,6 +201,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -158,12 +158,14 @@ class OnnxModel:
self.encoder = ort.InferenceSession(
encoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
def init_decoder(self, decoder_model_filename: str):
self.decoder = ort.InferenceSession(
decoder_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
decoder_meta = self.decoder.get_modelmeta().custom_metadata_map
@ -177,6 +179,7 @@ class OnnxModel:
self.joiner = ort.InferenceSession(
joiner_model_filename,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
joiner_meta = self.joiner.get_modelmeta().custom_metadata_map

View File

@ -54,6 +54,7 @@ class OnnxModel:
self.model = ort.InferenceSession(
nn_model,
sess_options=self.session_opts,
providers=["CPUExecutionProvider"],
)
meta = self.model.get_modelmeta().custom_metadata_map