Mirror of https://github.com/k2-fsa/icefall.git (synced 2025-09-08 00:24:19 +00:00)
Minor fixes
parent 4ebe821769
commit 22474e9abe
@@ -28,9 +28,6 @@ export CUDA_VISIBLE_DEVICES="0,1,2,3"
   --exp-dir pruned_transducer_stateless2/exp \
   --full-libri 1 \
   --max-duration 300
-
-
-
 """
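For reference, the context lines in this hunk come from the usage example in the training script's docstring. Read together with the export CUDA_VISIBLE_DEVICES="0,1,2,3" shown in the hunk header, the documented command is roughly ./pruned_transducer_stateless2/train.py --exp-dir pruned_transducer_stateless2/exp --full-libri 1 --max-duration 300; the script path is an assumption here, and the remaining flags of the full command are not visible in this hunk.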
@@ -938,6 +935,7 @@ def scan_pessimistic_batches_for_oom(
             # warmup = 0.0 is so that the derivs for the pruned loss stay zero
             # (i.e. are not remembered by the decaying-average in adam), because
             # we want to avoid these params being subject to shrinkage in adam.
+            with torch.cuda.amp.autocast(enabled=params.use_fp16):
                 loss, _ = compute_loss(
                     params=params,
                     model=model,
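The second hunk adds the torch.cuda.amp.autocast wrapper around the trial forward pass in scan_pessimistic_batches_for_oom, so the OOM sanity scan runs under the same precision setting as actual training when fp16 is enabled. Below is a minimal sketch of that pattern; the function name, the batches iterable, and the compute_loss callable are simplified placeholders for illustration, not icefall's actual signatures.

import torch
import torch.nn as nn


def scan_batches_for_oom_sketch(model: nn.Module, batches, compute_loss, use_fp16: bool) -> None:
    # Sketch only: feed worst-case batches through the model under the same
    # autocast setting as real training, so an fp16 run is checked with fp16
    # memory behaviour rather than full fp32.
    for batch in batches:
        # In the real code, warmup=0.0 keeps the pruned-loss derivatives at zero
        # so they are not remembered by Adam's decaying averages.
        with torch.cuda.amp.autocast(enabled=use_fp16):
            loss = compute_loss(model, batch)
        loss.backward()  # also exercise the backward pass, where OOM often shows up
        model.zero_grad(set_to_none=True)

Without the autocast context, the scan would always run in fp32 and would not reflect the memory profile of a mixed-precision run.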