diff --git a/egs/librispeech/ASR/conformer_ctc/train.py b/egs/librispeech/ASR/conformer_ctc/train.py
index e77327146..058efd061 100755
--- a/egs/librispeech/ASR/conformer_ctc/train.py
+++ b/egs/librispeech/ASR/conformer_ctc/train.py
@@ -650,8 +650,6 @@ def run(rank, world_size, args):
 
     model.to(device)
     if world_size > 1:
-        # Note: find_unused_parameters=True is needed in case we
-        # want to set params.att_rate = 0 (i.e. att decoder is not trained)
         model = DDP(model, device_ids=[rank])
 
     optimizer = Noam(