From 70a3c56a18d726d0a04c2774247e132e4464f54f Mon Sep 17 00:00:00 2001
From: "Wang, Guanbo"
Date: Wed, 9 Feb 2022 03:42:28 -0500
Subject: [PATCH] Fix librispeech train.py (#211)

* fix librispeech train.py

* remove note
---
 egs/librispeech/ASR/conformer_ctc/train.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/egs/librispeech/ASR/conformer_ctc/train.py b/egs/librispeech/ASR/conformer_ctc/train.py
index cb0bd5c2d..058efd061 100755
--- a/egs/librispeech/ASR/conformer_ctc/train.py
+++ b/egs/librispeech/ASR/conformer_ctc/train.py
@@ -601,14 +601,14 @@ def run(rank, world_size, args):
 
     if torch.cuda.is_available():
         device = torch.device("cuda", rank)
-    if "lang_bpe" in params.lang_dir:
+    if "lang_bpe" in str(params.lang_dir):
         graph_compiler = BpeCtcTrainingGraphCompiler(
             params.lang_dir,
             device=device,
             sos_token="<sos/eos>",
             eos_token="<sos/eos>",
         )
-    elif "lang_phone" in params.lang_dir:
+    elif "lang_phone" in str(params.lang_dir):
         assert params.att_rate == 0, (
             "Attention decoder training does not support phone lang dirs "
             "at this time due to a missing <sos/eos> symbol. Set --att-rate=0 "
@@ -650,9 +650,7 @@ def run(rank, world_size, args):
 
     model.to(device)
     if world_size > 1:
-        # Note: find_unused_parameters=True is needed in case we
-        # want to set params.att_rate = 0 (i.e. att decoder is not trained)
-        model = DDP(model, device_ids=[rank], find_unused_parameters=True)
+        model = DDP(model, device_ids=[rank])
 
     optimizer = Noam(
         model.parameters(),
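
Background on the first two hunks: params.lang_dir is a pathlib.Path, and
Path objects do not support substring membership tests, so the old checks
raised "TypeError: argument of type 'PosixPath' is not iterable" before
training could start; wrapping the value in str() restores the intended
substring test. A minimal sketch of the failure mode follows (the directory
name is a placeholder standing in for params.lang_dir, not taken from the
recipe):

    from pathlib import Path

    lang_dir = Path("data/lang_bpe_500")  # stands in for params.lang_dir

    try:
        "lang_bpe" in lang_dir  # what the old code evaluated
    except TypeError as e:
        print(e)  # argument of type 'PosixPath' is not iterable

    print("lang_bpe" in str(lang_dir))  # the patched check -> True

On the last hunk: with find_unused_parameters=True, DDP walks the autograd
graph after every backward pass to detect parameters that received no
gradient, which adds per-iteration overhead. Leaving it at the default of
False skips that scan but requires every registered parameter to take part
in producing the loss, which presumably holds for the configurations this
script still supports.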