From 875c436bf30ee723a1f1101735add9b97d4a80c4 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Tue, 11 Apr 2023 16:10:47 +0900
Subject: [PATCH] from local

---
 .../.optim.py.swp         | Bin 61440 -> 61440 bytes
 .../.prompt_tuning.py.swp | Bin 86016 -> 90112 bytes
 .../prompt_tuning.py      |   4 +++-
 3 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.optim.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.optim.py.swp
index 7a3b3f242666472e341ef369e18a3ee0fdaec220..08afb901a62a1ffea1961fd348c4a3d61151f363 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.optim.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.optim.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp
index 4af048addf70237aecc93dcd55db01f30e2030e3..c45141abe409f4f5b92730f2cb597c35dcbd55b0 100644
Binary files a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp and b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp differ
diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
index 9e461556e..2aabd8765 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
@@ -1063,6 +1063,8 @@ def train_one_epoch(
         try:
             loss_info.reduce(loss.device)
         except:
             pass
+        logging.info(loss_info)
+
         numel = params.world_size / (params.accum_grads * loss_info["utterances"])
         loss *= numel ## normalize loss over utts(batch size)
@@ -1607,7 +1609,7 @@ def run_adapter(rank, world_size, args, wb=None):
         [prompt],
         lr=params.adapter_lr,
         clipping_scale=5.0,
-        parameters_names=['p'],
+        parameters_names=['P'],
     )
     scheduler_adapter = Eden(optimizer_adapter, 10000, 7) #params.lr_batche, params.lr_epochs)
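--
Note on the train_one_epoch hunk: the two added lines log the per-batch
metrics right before the loss is rescaled. The rescaling computes
numel = world_size / (accum_grads * utterances) and multiplies the summed
loss by it, turning a sum over utterances into a quantity whose gradient
magnitude is roughly independent of the number of ranks and accumulation
steps. A minimal sketch of that arithmetic (the function and argument
names are illustrative, not the project's API):

    # Sketch of the normalization applied in train_one_epoch. Assumes
    # `loss` is a sum over the utterances of the current batch and
    # `utterances` is the count recorded in loss_info["utterances"].
    def normalize_loss(loss: float, world_size: int, accum_grads: int,
                       utterances: int) -> float:
        numel = world_size / (accum_grads * utterances)
        return loss * numel

    # Example: 4 ranks, 2 accumulation steps, 16 utterances in the batch
    # -> the summed loss is scaled by 4 / (2 * 16) = 0.125.
    assert normalize_loss(32.0, 4, 2, 16) == 4.0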
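Note on the run_adapter hunk: in upstream icefall, ScaledAdam's
parameters_names argument carries one list of names per parameter group
and is used only for gradient-clipping diagnostics (reporting which
tensors dominate the clipping scale), so renaming 'p' to 'P' changes a
log label rather than the optimization; this fork's optim.py may accept
the flat list seen in the diff. A hedged, self-contained sketch against
upstream icefall's signatures (the prompt shape and learning rate are
stand-ins for the values configured elsewhere in this script):

    import torch
    from optim import Eden, ScaledAdam  # icefall's optim.py, assumed importable

    # A single trainable prompt tensor standing in for the tuned adapter.
    prompt = torch.nn.Parameter(torch.zeros(16, 768))

    optimizer = ScaledAdam(
        [prompt],
        lr=1e-3,                   # stand-in for params.adapter_lr
        clipping_scale=5.0,        # clip updates relative to recent norms
        parameters_names=[["P"]],  # upstream: one name list per param group
    )
    # Eden decays the learning rate as a function of both the batch and
    # epoch counters; 10000 and 7 mirror the constants in the diff.
    scheduler = Eden(optimizer, lr_batches=10000, lr_epochs=7)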