diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp
index 37e5b443c..9d7582dd4 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
index 1788243ff..4c25feaf3 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
@@ -1606,13 +1606,13 @@ def run_adapter(rank, world_size, args, wb=None):
     lora_modules = []
     for modules in model.modules():
         if isinstance(modules, fairseq.modules.multihead_attention.MultiheadAttention):
-            lora_modules.append(LoRAHook(modules))
-            #for module in modules.modules():
+            #lora_modules.append(LoRAHook(modules))
+            for module in modules.modules():
                 # if rank == 0: logging.info(module)
                 #print(module)
-                #if isinstance(module, torch.nn.Linear):
+                if isinstance(module, torch.nn.Linear):
                     #if rank == 0: print(module)
-                    #lora_modules.append(LoRAHook(module))
+                    lora_modules.append(LoRAHook(module))
 
     adapter_names = []
     adapter_param = []
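
The change flips the LoRA injection point: instead of wrapping each fairseq MultiheadAttention block as a whole, it now walks the block's submodules and wraps each torch.nn.Linear (the q/k/v/out projections), matching the usual LoRA practice of adapting individual projection matrices. LoRAHook itself is not part of this diff; the following is a minimal, hypothetical sketch of what such a forward-hook-based wrapper could look like. The class name mirrors the diff, but the r and alpha hyperparameters, the zero-initialization of the B factor, and the parameters()/remove() helpers are illustrative assumptions, not the recipe's actual implementation.

    import torch


    class LoRAHook:
        """Hypothetical hook-based LoRA wrapper for a frozen nn.Linear.

        A sketch only: the real LoRAHook used by train_lora.py is not
        shown in this diff, and all defaults here are assumptions.
        """

        def __init__(self, linear: torch.nn.Linear, r: int = 8, alpha: int = 16):
            assert isinstance(linear, torch.nn.Linear)
            device = linear.weight.device
            # Low-rank factors: A is small random, B is zero, so the adapted
            # layer initially computes exactly the base model's output.
            self.lora_A = torch.nn.Parameter(
                torch.randn(r, linear.in_features, device=device) * 0.01
            )
            self.lora_B = torch.nn.Parameter(
                torch.zeros(linear.out_features, r, device=device)
            )
            self.scaling = alpha / r
            # Freeze the base projection; only the LoRA factors get gradients.
            linear.weight.requires_grad_(False)
            if linear.bias is not None:
                linear.bias.requires_grad_(False)
            self.handle = linear.register_forward_hook(self._hook)

        def _hook(self, module, inputs, output):
            # Returning a value from a forward hook replaces the module output.
            x = inputs[0]
            return output + (x @ self.lora_A.T @ self.lora_B.T) * self.scaling

        def parameters(self):
            # Trainable tensors to hand to the optimizer (cf. adapter_param).
            return [self.lora_A, self.lora_B]

        def remove(self):
            self.handle.remove()


    if __name__ == "__main__":
        layer = torch.nn.Linear(16, 16)
        hook = LoRAHook(layer, r=4)
        # Output now includes the (initially zero) low-rank LoRA path.
        y = layer(torch.randn(2, 16))

Under this reading, the factors exposed via parameters() would be what feeds the adapter_param list collected right after the loop, so that only the low-rank factors reach the optimizer while the base Linear weights stay frozen.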