Mirror of https://github.com/k2-fsa/icefall.git, synced 2025-12-11 06:55:27 +00:00, from local.
Commit 24ba63ed5b (parent e039441a0f).
Two binary files changed (not shown).
@@ -1013,6 +1013,7 @@ def train_one_epoch(
        scaler.scale(loss).backward()
    else:
        logging.warning(f"Grad scale is small: {cur_grad_scale}")

    if params.multi_optim and batch_idx % params.accum_grads == 0:
        set_batch_count(model, params.batch_idx_train)
        scheduler_enc.step_batch(params.batch_idx_train)
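Note on the hunk above: it sits in the mixed-precision branch of train_one_epoch, where the loss is backpropagated through the GradScaler and, when --multi-optim is enabled, the encoder scheduler is stepped only every accum-grads batches. A minimal, self-contained sketch of that accumulation pattern follows; the toy modules, AdamW optimizers, StepLR scheduler, and random data are illustrative assumptions, not icefall code — only the scale/step-every-N flow mirrors the diff.

    # Hedged sketch: AMP loss scaling plus gradient accumulation, stepping two
    # optimizers (encoder / decoder) and the encoder scheduler only every
    # `accum_grads` batches. Toy model, data, and schedules are assumptions.
    import torch

    device = "cuda" if torch.cuda.is_available() else "cpu"
    encoder = torch.nn.Linear(8, 8).to(device)    # stand-in for the d2v encoder
    decoder = torch.nn.Linear(8, 2).to(device)    # stand-in for decoder/joiner
    optimizer_enc = torch.optim.AdamW(encoder.parameters(), lr=1e-3)
    optimizer_dec = torch.optim.AdamW(decoder.parameters(), lr=5e-1)
    # Plain StepLR stands in for icefall's Eden scheduler and step_batch().
    scheduler_enc = torch.optim.lr_scheduler.StepLR(optimizer_enc, step_size=10)
    scaler = torch.cuda.amp.GradScaler(enabled=(device == "cuda"))

    accum_grads = 3
    for batch_idx in range(1, 13):
        x = torch.randn(4, 8, device=device)
        y = torch.randint(0, 2, (4,), device=device)
        with torch.cuda.amp.autocast(enabled=(device == "cuda")):
            loss = torch.nn.functional.cross_entropy(decoder(encoder(x)), y)
        # Backprop every batch; optionally divide by accum_grads to average
        # gradients over the accumulation window.
        scaler.scale(loss).backward()

        if batch_idx % accum_grads == 0:
            # Step both optimizers through the shared scaler, advance the
            # encoder LR schedule, then clear the accumulated gradients.
            scaler.step(optimizer_enc)
            scaler.step(optimizer_dec)
            scaler.update()
            scheduler_enc.step()
            optimizer_enc.zero_grad()
            optimizer_dec.zero_grad()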
@@ -36,13 +36,12 @@ else
    --enable-spec-aug False \
    --multi-optim True \
    --world-size 4 \
    --start-batch 34000 \
    --num-epochs 30 \
    --start-epoch 6 \
    --full-libri 1 \
    --exp-dir ./pruned_transducer_stateless_d2v_v2/$1 \
    --max-duration 150 \
    --freeze-finetune-updates 2000 \
    --use-fp16 1 \
    --freeze-finetune-updates 2000 # --use-fp16 1 \
    --peak-enc-lr 0.001 \
    --peak-dec-lr 0.5 \
    --accum-grads 3 \
@@ -55,3 +54,5 @@ else
    --context-size 2 \
    --ctc-loss-scale 0.2
fi

#--start-epoch 6 \
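For context on the flags above: --peak-enc-lr and --peak-dec-lr give the encoder and decoder different peak learning rates, which only takes effect together with --multi-optim True, and --accum-grads sets how many batches are accumulated between optimizer steps. A hedged argparse sketch of how such flags could be parsed and mapped to two optimizers (only the flag names come from the script; every module, helper, and optimizer choice below is an assumption):

    # Hedged sketch: parsing the script's flags and building one optimizer per
    # module group when --multi-optim is true. Module stand-ins and the use of
    # AdamW are assumptions; only the flag names come from the script above.
    import argparse
    import torch

    def str2bool(v: str) -> bool:
        # argparse's type=bool would treat the string "False" as True,
        # so parse truthy strings explicitly.
        return v.lower() in ("1", "true", "yes", "y")

    parser = argparse.ArgumentParser()
    parser.add_argument("--multi-optim", type=str2bool, default=False)
    parser.add_argument("--peak-enc-lr", type=float, default=0.001)
    parser.add_argument("--peak-dec-lr", type=float, default=0.5)
    parser.add_argument("--accum-grads", type=int, default=1)
    args = parser.parse_args(["--multi-optim", "True", "--accum-grads", "3"])

    encoder = torch.nn.Linear(8, 8)   # stand-in for the data2vec encoder
    decoder = torch.nn.Linear(8, 2)   # stand-in for the decoder/joiner

    if args.multi_optim:
        # Separate optimizers so encoder and decoder get different peak LRs.
        optimizer_enc = torch.optim.AdamW(encoder.parameters(), lr=args.peak_enc_lr)
        optimizer_dec = torch.optim.AdamW(decoder.parameters(), lr=args.peak_dec_lr)
    else:
        # Single optimizer over all parameters.
        optimizer = torch.optim.AdamW(
            list(encoder.parameters()) + list(decoder.parameters()),
            lr=args.peak_dec_lr,
        )

In the script itself, $1 supplies the experiment name, so each run's checkpoints land under the directory given by --exp-dir, ./pruned_transducer_stateless_d2v_v2/$1.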