From aecaf1cedc5d14d83cde3ceb6e667ec92a32f6f1 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Thu, 18 May 2023 17:07:42 +0900
Subject: [PATCH] from local

---
 .../data2vec_audio.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/data2vec_audio.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/data2vec_audio.py
index f45a09e6f..666a069a7 100644
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/data2vec_audio.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/data2vec_audio.py
@@ -252,9 +252,23 @@ class LoRAModule(nn.Module):
         else:
             self.lora_dropout = lambda x: x
 
-        self.lora_A = nn.Linear(embedding_dim, self.r)
-        self.lora_B = nn.Linear(self.r, embedding_dim)
+        self.lora_A = nn.ModuleList(
+            [nn.Linear(embedding_dim, self.r) for _ in range(layer_num)])
+        self.lora_B = nn.ModuleList(
+            [nn.Linear(self.r, embedding_dim) for _ in range(layer_num)])
         self.scaling = self.lora_alpha / self.r
+        self.reset_parameters()
+
+    def reset_parameters(self):
+        # nn.init cannot act on an nn.ModuleList directly; initialise each
+        # per-layer pair so every adapter starts as a no-op (B is zero).
+        for lora_A, lora_B in zip(self.lora_A, self.lora_B):
+            nn.init.normal_(lora_A.weight)
+            nn.init.zeros_(lora_B.weight)
+            nn.init.zeros_(lora_B.bias)
+
+    def forward(self, x, layer_id=-1):
+        '''
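Note on the truncated hunk: the patch cuts off at the opening docstring of forward(), so its body is not shown. Below is a minimal, hypothetical sketch of how a per-layer LoRA forward pass could look given the fields defined above. The class name LoRAModuleSketch, the default hyperparameters, and the residual form x + scaling * B(A(dropout(x))) are assumptions based on the standard LoRA formulation, not code from this repository.

    import torch
    import torch.nn as nn

    class LoRAModuleSketch(nn.Module):
        # Hypothetical stand-in for the patched LoRAModule; the defaults
        # below are illustrative, not values taken from the repository.
        def __init__(self, embedding_dim=768, r=16, lora_alpha=16, layer_num=12):
            super().__init__()
            self.lora_dropout = lambda x: x  # identity, as in the patch's else-branch
            self.lora_A = nn.ModuleList(
                [nn.Linear(embedding_dim, r) for _ in range(layer_num)])
            self.lora_B = nn.ModuleList(
                [nn.Linear(r, embedding_dim) for _ in range(layer_num)])
            self.scaling = lora_alpha / r
            for lora_B in self.lora_B:  # zero-init B: adapters start as no-ops
                nn.init.zeros_(lora_B.weight)
                nn.init.zeros_(lora_B.bias)

        def forward(self, x, layer_id=-1):
            # Assumed body: pick this layer's adapter pair and apply the
            # standard LoRA residual update x + scaling * B(A(dropout(x))).
            down = self.lora_A[layer_id](self.lora_dropout(x))  # (B, T, r)
            return x + self.scaling * self.lora_B[layer_id](down)

    x = torch.randn(2, 50, 768)  # (batch, frames, embedding_dim)
    print(LoRAModuleSketch()(x, layer_id=3).shape)  # torch.Size([2, 50, 768])

With lora_B zero-initialised, the sketch returns x unchanged at the first step, which is the usual way LoRA guarantees that fine-tuning starts from the pretrained model's behaviour.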