From 932aca6d1c4fca4a43a842a60008bccd51183c4e Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Wed, 26 Apr 2023 16:58:02 +0900
Subject: [PATCH] from local

---
 .../.prompt_tuning.py.swp | Bin 77824 -> 77824 bytes
 .../prompt_tuning.py      |   4 ++--
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp
index aff48e0e45c79e4f750f6a0aff4883854a53f666..61002606c74e9fa7fb8d638a1f32d52e3ea389cf 100644
GIT binary patch
delta 396
zcmXBP&nv@m9LMqZ`@7F~vr=|i`ORE7X$hs=9JozmCT$1dAMgik7iyF>4&RG|Irvcy
>U;Rw2_>;9l$?jt{sA?nJjdZtujA{Pt(t7roST^arXoc*7>jGFZi`e<#t9aY#5l&#
4GT3Ra))DVVjXiBMF;9Wkyl*e42LMXrP-P~%RL%)OYs>?2b{@ajk+U==X<+VL_%J$
PkS|U)FP5c0`urcBOvmP7vzw^8dfod2tNHHPk6*Bwy}i;4B?044Se7o7sz7=&A_kb
aU)A|&2X@RB)Z^(g$GLZv4S*Gm_`(BxUr~)hXdr0M(EGpRUz!PM|9W!U*GB74(deU
E7Y<8A_5c6?

delta 387
zcmWm9zb^xE9LDj_=k9y0CM0dx#E+asEJ713MuV-=C?(W_f57f0l9Fg5QH$Yf@FS5(
ZaCdUVrXchgIF6ZY$nN3@$T?Sp7D8mP2)AqwaGZIDpCm&nXIlvj>sb(aDgS{F@;I=
!^Wo(so@;E*up#}(2K|xdBH8NaEeM$o%_(h&#qbJ=}{dE(z!;>=@OAtm>ARn-9GCO
DPaLQxM;;ie({VVJZxeEGZ?~mOr(J)Tw))4Si}hac-+D}-cUse2WXf5v>zK;mOI8V
c92IO3~ba%IKes!Sj8;Hk;J`4E^gqXhyqebT8I8uci7|VQLG$wsvFjaH68y4!{bD+

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
index 5311fc36f..08dd22783 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
@@ -1534,7 +1534,7 @@ def run_adapter(rank, world_size, args, wb=None):
     params = get_params()
     params.update(vars(args))
 
-    fix_random_seed(params.seed)
+    #fix_random_seed(params.seed)
 
     if world_size > 1:
         setup_dist(rank, world_size, params.master_port)
@@ -1669,7 +1669,7 @@ def run_adapter(rank, world_size, args, wb=None):
     for epoch in range(params.start_epoch, params.num_epochs + 1):
         logging.info(f"update num : {params.batch_idx_train}")
         scheduler.step_epoch(epoch - 1)
-        fix_random_seed(params.seed + epoch - 1)
+        #fix_random_seed(params.seed + epoch - 1)
         train_dl.sampler.set_epoch(epoch - 1)
 
         if tb_writer is not None: