From e5e0363fb2f009d563a277115cfd0bbf18465daf Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Thu, 27 Apr 2023 21:12:37 +0900
Subject: [PATCH] from local

---
 .../.prompt_tuning.py.swp                      | Bin 86016 -> 86016 bytes
 .../prompt_tuning.py                           |   5 ++---
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/.prompt_tuning.py.swp
index c3a5a85bc980b9cf625086a6400c5a1a66c741ab..73dfd6ef88ade1156b5c40ae739e38af90153125 100644
GIT binary patch
delta 393
zcmWO2K`4W99LDkIHScUBvrsad6gLM}xCq5Ta}sL1AjQhnfjM}08b`ec2bX1+tv2DG
zk_+Xu%vs?iwYaP-shxZepWD;#>G?TX+sWFOfuNN%ox%Eueddv(u7AcHmfNIj>|+me
zh#-u=R;h?{9ODSvm`5+_e(47lT%d#$7BPSbys+`+qX$<=pdTThR`uB;-64+yOk)^B
zsClJ(WRQk~9V}xERa^SND;`lq8c}mHTkx2kCD(h}2w8)s7VGY6uUb-$;R;CnVLfKW
z8-y!(LIDZHF^LI;;lX#aR7M^d>|zDu=tG0EFL*!+Idik|Yq`ny4s*M?(Bw|7jhg7z
G^VdHQ&Pfpf

delta 395
zcmWO2K`4W99LDkAe|!IIll9t#usPtOrZ(Y3#6`OZ$$LU-WmgC6viCHudJik7aT#rD
z{8M(JoR;OJ6gMfgi$c_HzK754>G$;f+@f}ix@t$%w&{xe#PzXXBv<@arPN^sNmFGZ~m%6W>keuZEL}&2ED2E
Kgc(aeHva)@1x9oL

diff --git a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
index c2386263f..adc77f8f9 100755
--- a/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
+++ b/egs/librispeech/ASR/pruned_transducer_stateless_d2v_v2/prompt_tuning.py
@@ -1535,7 +1535,7 @@ def run_adapter(rank, world_size, args, wb=None):
     params = get_params()
     params.update(vars(args))
 
-    #fix_random_seed(params.seed)
+    fix_random_seed(params.seed)
     if world_size > 1:
         setup_dist(rank, world_size, params.master_port)
 
@@ -1670,7 +1670,7 @@ def run_adapter(rank, world_size, args, wb=None):
     for epoch in range(params.start_epoch, params.num_epochs + 1):
         logging.info(f"update num : {params.batch_idx_train}")
         scheduler.step_epoch(epoch - 1)
-        #fix_random_seed(params.seed + epoch - 1)
+        fix_random_seed(params.seed + epoch - 1)
         train_dl.sampler.set_epoch(epoch - 1)
 
         if tb_writer is not None:
@@ -1693,7 +1693,6 @@ def run_adapter(rank, world_size, args, wb=None):
             rank=rank,
             wb=wb,
         )
-        exit()
 
         if params.print_diagnostics:
             diagnostic.print_diagnostics()
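
The functional change in this patch re-enables deterministic seeding, once at startup with params.seed and again at every epoch boundary with params.seed + epoch - 1, and removes a stray exit() that stopped training after the first epoch. The patch only calls fix_random_seed(); the helper itself comes from the recipe's imports. As a rough illustration, a seeding helper of this kind usually looks like the sketch below; the implementation actually used by this recipe may differ, so every name in the sketch is an assumption rather than the recipe's real code.

# Illustrative sketch only: a fix_random_seed() helper of the shape the patch
# relies on. The recipe's real helper may behave differently.
import random

import numpy as np
import torch


def fix_random_seed(seed: int) -> None:
    """Seed the Python, NumPy and PyTorch RNGs so that data shuffling and
    dropout are reproducible for a given seed."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)  # also seeds the CUDA generators on all devices

Calling such a helper once at startup with params.seed and again at each epoch boundary with params.seed + epoch - 1, as the re-enabled lines do, gives every epoch a different but reproducible random stream and keeps runs consistent across restarts and DDP ranks.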