From 810d178208a2584c65ae22791a0b4c5f23c09190 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Wed, 24 May 2023 13:08:08 +0900
Subject: [PATCH] from local

---
 egs/librispeech/ASR/.lora.sh.swp | Bin 12288 -> 12288 bytes
 .../.train_lora.py.swp           | Bin 86016 -> 86016 bytes
 .../train_lora.py                |   3 ++-
 3 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/.lora.sh.swp b/egs/librispeech/ASR/.lora.sh.swp
index 33fa1c12d2b5463afcd7233bbf022911c0ca51e8..ff6ca3d377a395bc84030afb0dfcc820a8a95252 100644
GIT binary patch
delta 31
lcmZojXh;xCG6?hZRj|}EU;qLE1_r&}+>{mS8^vDf0|0+v2v-0A

delta 31
lcmZojXh;xCG6?hZRj|}EU;qLE28Js=xhbh?8^vDf0|10Q2)F<2`l-2$=pziP3HQ0d~eprtRBR89lWCjpG@#

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
index 45be99671..72f4310af 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
@@ -1589,7 +1589,8 @@ def run_adapter(rank, world_size, args, wb=None):
     if world_size > 1:
         logging.info("Using DDP")
         model = DDP(model, device_ids=[rank], find_unused_parameters=True)
-
+
+    print('-'*30)
     for module in model.modules():
         print(module)
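
For reference, below is a minimal, self-contained sketch of the debug pattern this patch adds: print a visual separator, then dump every submodule returned by model.modules(). The toy nn.Sequential model is an assumption used purely for illustration; in train_lora.py the inspected object is the training model, which is wrapped in DDP when world_size > 1, in which case modules() yields the DistributedDataParallel wrapper first and then the underlying module tree.

import torch.nn as nn

# Stand-in model (assumption, for illustration only); the patch inspects the
# model built in train_lora.py, optionally wrapped in DDP.
model = nn.Sequential(
    nn.Linear(16, 32),
    nn.ReLU(),
    nn.Linear(32, 8),
)

print('-' * 30)                  # visual separator, as added by the patch
for module in model.modules():   # yields the model itself, then all submodules recursively
    print(module)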