diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp
index be2cc259f..c768fb7ef 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
index 60d25ee0e..33a1da0d2 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
@@ -1616,12 +1616,12 @@ def run_adapter(rank, world_size, args, wb=None):
 
     adapter_names = []
     adapter_param = []
-    '''
     for i, lora in enumerate(lora_modules):
         for n, p in lora.lora.named_parameters():
             new_n = str(i) + n
             adapter_names.append(new_n)
             adapter_param.append(p)
+    '''
 
     for n, p in model.named_parameters():
         if 'joiner' in n or 'simple' in n or 'ctc' in n:
@@ -1630,7 +1630,7 @@ def run_adapter(rank, world_size, args, wb=None):
             p.requires_grad = True
         else:
             p.requires_grad = False
-
+    '''
     #for lora in lora_modules:
     #    print(lora.lora.state_dict())
     #print(adapter_names)
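
Note on the change above: the hunks re-enable the loop that collects the LoRA adapter parameters and keep the selective freeze that leaves only the joiner/simple/ctc branches trainable. The following is a minimal, self-contained sketch of that freeze pattern, not the recipe's actual code; the toy `model` and module names here are stand-ins for the real `run_adapter` objects, and the `if`-body is assumed from the surrounding context of the diff.

```python
# Illustrative sketch (assumption, not the repo's code): freeze everything
# except parameters whose names contain 'joiner', 'simple', or 'ctc',
# collecting the trainable ones, as run_adapter does above.
import torch.nn as nn

# Toy model standing in for the real transducer; names chosen so that
# some parameters match the substrings checked below.
model = nn.ModuleDict({
    "encoder": nn.Linear(8, 8),
    "joiner": nn.Linear(8, 8),
    "simple_lm_proj": nn.Linear(8, 8),
})

adapter_names, adapter_param = [], []
for n, p in model.named_parameters():
    if "joiner" in n or "simple" in n or "ctc" in n:
        adapter_names.append(n)
        adapter_param.append(p)
        p.requires_grad = True
    else:
        p.requires_grad = False

# Only 'joiner.*' and 'simple_lm_proj.*' parameters remain trainable.
print([n for n, p in model.named_parameters() if p.requires_grad])
```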