From 3af6db8981ec74beeb46549528a970b5b63cf60e Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Thu, 25 May 2023 21:29:22 +0900
Subject: [PATCH] from local

---
 .../.train_lora.py.swp | Bin 86016 -> 86016 bytes
 .../train_lora.py      |   4 ----
 2 files changed, 4 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp
index 7e7b2e29c44b1ffebbb9d457a3cf6509fc8ab729..89211ec789291112b9063539d12684c06484766a 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.train_lora.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
index 75b12e33a..5c9578ed7 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/train_lora.py
@@ -1608,12 +1608,8 @@ def run_adapter(rank, world_size, args, wb=None):
     lora_modules = []
     for modules in model.modules():
         if isinstance(modules, fairseq.modules.multihead_attention.MultiheadAttention):
-            #lora_modules.append(LoRAHook(modules))
             for module in modules.modules():
-                # if rank == 0: logging.info(module)
-                #print(module)
                 if isinstance(module, torch.nn.Linear):
-                    #if rank == 0: print(module)
                     lora_modules.append(LoRAHook(module))
 
     adapter_names = []
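
Note: the hunk above registers a LoRAHook on every nn.Linear found inside
fairseq's MultiheadAttention blocks, but the LoRAHook implementation itself is
not part of this diff. The sketch below shows one plausible shape for such a
hook, assuming a standard LoRA low-rank update applied through a PyTorch
forward hook; the rank, alpha, init scheme, and helper methods are
illustrative assumptions, not the repository's actual code.

    import torch
    import torch.nn as nn

    class LoRAHook:
        """Low-rank adapter attached to a frozen nn.Linear via a forward hook.

        Hypothetical stand-in for the LoRAHook used in train_lora.py.
        """

        def __init__(self, linear: nn.Linear, rank: int = 8, alpha: float = 16.0):
            dev, dt = linear.weight.device, linear.weight.dtype
            # Low-rank factors: delta_W = B @ A, shape (out_features, in_features).
            self.lora_A = nn.Parameter(
                torch.randn(rank, linear.in_features, device=dev, dtype=dt) * 0.01
            )
            self.lora_B = nn.Parameter(
                torch.zeros(linear.out_features, rank, device=dev, dtype=dt)
            )
            self.scaling = alpha / rank
            # Freeze the wrapped layer; only lora_A and lora_B are trained.
            linear.weight.requires_grad_(False)
            if linear.bias is not None:
                linear.bias.requires_grad_(False)
            self.handle = linear.register_forward_hook(self._hook)

        def _hook(self, module, inputs, output):
            # Returning a value from a forward hook replaces the module's output.
            (x,) = inputs
            return output + (x @ self.lora_A.t() @ self.lora_B.t()) * self.scaling

        def parameters(self):
            return [self.lora_A, self.lora_B]

        def remove(self):
            self.handle.remove()

With the loop in the patch, the trainable adapter weights could then be
collected as, e.g., [p for h in lora_modules for p in h.parameters()] and
handed to the optimizer, leaving the backbone frozen.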