From 6171d8ef8dc8c23eb715a71d1c8af99d2b560209 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Thu, 25 May 2023 17:31:36 +0900
Subject: [PATCH] from local

---
 .../.train_lora.py.swp | Bin 86016 -> 86016 bytes
 .../train_lora.py      |   3 ++-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp
index 05980cff34ca956a9941597447c855e9bf938650..eead3c3b2bdea4dea99bfc45a87932f97b4bc5b5 100644
GIT binary patch
delta 133
zcmZozz}m2YRW!*U%+puFQqO<^2m}}yuFK@76l@f|EyyUh`JqyRIzbWg`FzWFZa!

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
index 7b793fcaa..76d8a199e 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
@@ -1607,8 +1607,9 @@ def run_adapter(rank, world_size, args, wb=None):
     for modules in model.modules():
         if isinstance(modules, fairseq.modules.multihead_attention.MultiheadAttention):
             for module in modules.modules():
-                print(module)
+                #print(module)
                 if isinstance(module, torch.nn.Linear):
+                    print(module)
                     lora_modules.append(LoRAHook(module))
 
     adapter_names = []
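
The hunk above wraps every torch.nn.Linear found inside a fairseq MultiheadAttention
block with a LoRAHook. LoRAHook itself is not defined in this patch; the sketch below
is a minimal, hypothetical illustration of such a hook, assuming it adds trainable
low-rank factors to a frozen nn.Linear through a PyTorch forward hook. The class name
matches the patch, but the body here is an assumption, not the repository's actual
implementation.

    import torch
    import torch.nn as nn


    class LoRAHook:
        """Attach a trainable low-rank update to a frozen nn.Linear via a forward hook.

        Hypothetical sketch: the real LoRAHook in the repository may differ.
        """

        def __init__(self, linear: nn.Linear, rank: int = 8, alpha: float = 16.0):
            self.scaling = alpha / rank
            # Low-rank factors: down-projection A and up-projection B. B starts at
            # zero so the hooked layer initially behaves exactly like the original.
            self.lora_a = nn.Parameter(torch.randn(linear.in_features, rank) * 0.01)
            self.lora_b = nn.Parameter(torch.zeros(rank, linear.out_features))
            # Freeze the original weights; only the LoRA factors are trained.
            linear.weight.requires_grad_(False)
            if linear.bias is not None:
                linear.bias.requires_grad_(False)
            self.handle = linear.register_forward_hook(self._hook)

        def _hook(self, module, inputs, output):
            # Add the low-rank correction x @ A @ B to the frozen layer's output.
            (x,) = inputs
            return output + (x @ self.lora_a @ self.lora_b) * self.scaling

        def parameters(self):
            # Expose only the adapter parameters for the optimizer.
            return [self.lora_a, self.lora_b]

        def remove(self):
            # Detach the hook to restore the original layer behaviour.
            self.handle.remove()

Used as in the patched loop, each matching Linear layer inside a MultiheadAttention
module would be collected into lora_modules, and only the hooks' parameters() would
then be handed to the adapter optimizer, leaving the backbone weights frozen.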