diff --git a/egs/librispeech/ASR/conformer_ctc/pretrained.py b/egs/librispeech/ASR/conformer_ctc/pretrained.py
index 3ed2d74e5..07d3e7269 100755
--- a/egs/librispeech/ASR/conformer_ctc/pretrained.py
+++ b/egs/librispeech/ASR/conformer_ctc/pretrained.py
@@ -59,7 +59,7 @@ def get_parser():
         type=str,
         required=True,
         help="Path to lang bpe dir.",
-    )
+    )
 
     parser.add_argument(
         "--method",
@@ -325,7 +325,9 @@ def main():
         "attention-decoder",
     ]:
         logging.info(f"Loading HLG from {params.lang_dir}/HLG.pt")
-        HLG = k2.Fsa.from_dict(torch.load(params.lang_dir + "/HLG.pt", map_location="cpu"))
+        HLG = k2.Fsa.from_dict(
+            torch.load(params.lang_dir + "/HLG.pt", map_location="cpu")
+        )
         HLG = HLG.to(device)
         if not hasattr(HLG, "lm_scores"):
             # For whole-lattice-rescoring and attention-decoder
@@ -390,7 +392,9 @@ def main():
         best_path = next(iter(best_path_dict.values()))
 
     hyps = get_texts(best_path)
-    word_sym_table = k2.SymbolTable.from_file(params.lang_dir + "/words.txt")
+    word_sym_table = k2.SymbolTable.from_file(
+        params.lang_dir + "/words.txt"
+    )
     hyps = [[word_sym_table[i] for i in ids] for ids in hyps]
 
     s = "\n"