From 930828fecaa87f063e92a78e5200b2ff270716bd Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Mon, 26 Dec 2022 14:01:00 +0900
Subject: [PATCH] from local

---
 .../train.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
index 4ee896313..4887247f9 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train.py
@@ -1524,6 +1524,10 @@ def run_adapter(rank, world_size, args, wb=None):
         else:
             p.requires_grad = False
 
+    for n, p in model.named_parameters():
+        if p.requires_grad:
+            logging.info(n)
+
     optimizer_adapter = ScaledAdam(
         adapter_param,
         lr=0.0001,
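
Context for the four inserted lines: after the preceding loop freezes every non-adapter parameter, the new loop logs each parameter name that still has requires_grad=True, a quick sanity check that the optimizer will only see adapter weights. Below is a minimal self-contained sketch of the same pattern; the toy model, the layer-naming convention, and the stock Adam optimizer are illustrative assumptions, not code from this repo (the repo passes the collected adapter_param list to ScaledAdam).

import logging

import torch
import torch.nn as nn

logging.basicConfig(level=logging.INFO)

# Toy stand-in for the real model: layer 0 plays the frozen backbone,
# layer 1 plays the trainable adapter (this naming is an assumption).
model = nn.Sequential(nn.Linear(16, 16), nn.Linear(16, 4))

# Freeze everything except the "adapter" parameters, collecting the
# trainable ones the way the patched function collects adapter_param.
adapter_param = []
for n, p in model.named_parameters():
    if n.startswith("1."):
        p.requires_grad = True
        adapter_param.append(p)
    else:
        p.requires_grad = False

# The four lines the patch adds: log whatever is still trainable so a
# misconfigured freeze shows up in the training log immediately.
for n, p in model.named_parameters():
    if p.requires_grad:
        logging.info(n)

# The repo builds ScaledAdam(adapter_param, lr=0.0001) at this point; a
# stock optimizer is used here only to keep the sketch self-contained.
optimizer = torch.optim.Adam(adapter_param, lr=0.0001)

Running the sketch logs only "1.weight" and "1.bias", confirming the backbone stayed frozen and the optimizer covers exactly the adapter parameters.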