minor fix

This commit is contained in:
yaozengwei 2024-05-25 17:48:55 +08:00
parent 0be32f3da0
commit 4c8defb269
2 changed files with 3 additions and 4 deletions

View File

@@ -15,11 +15,9 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# The model structure is modified from Daniel Povey's Zipformer
# https://github.com/k2-fsa/icefall/blob/master/egs/librispeech/ASR/pruned_transducer_stateless7/zipformer.py
import math import math
from typing import List, Optional, Tuple from typing import List, Optional
import k2 import k2
import torch import torch

View File

@@ -48,6 +48,7 @@ It supports training with:
- transducer loss (default), with `--use-transducer True --use-ctc False` - transducer loss (default), with `--use-transducer True --use-ctc False`
- ctc loss (not recommended), with `--use-transducer False --use-ctc True` - ctc loss (not recommended), with `--use-transducer False --use-ctc True`
- transducer loss & ctc loss, with `--use-transducer True --use-ctc True` - transducer loss & ctc loss, with `--use-transducer True --use-ctc True`
- ctc loss & attention decoder loss, with `--use-ctc True --use-attention-decoder True `
""" """
@@ -917,7 +918,7 @@ def compute_loss(
if params.use_ctc: if params.use_ctc:
info["ctc_loss"] = ctc_loss.detach().cpu().item() info["ctc_loss"] = ctc_loss.detach().cpu().item()
if params.use_attention_decoder: if params.use_attention_decoder:
info["attn_deocder_loss"] = attention_decoder_loss.detach().cpu().item() info["attn_decoder_loss"] = attention_decoder_loss.detach().cpu().item()
return loss, info return loss, info