mirror of
https://github.com/k2-fsa/icefall.git
synced 2025-08-08 09:32:20 +00:00
parent
28af269e5e
commit
9ddd811925
@ -540,6 +540,10 @@ for m in greedy_search fast_beam_search modified_beam_search ; do
|
||||
done
|
||||
```
|
||||
|
||||
Note that a small change is made to the `pruned_transducer_stateless7/decoder.py` in
|
||||
a recent PR in the [icefall repository](https://github.com/k2-fsa/icefall) (the original link here pointed to a local filesystem path — TODO: restore the actual PR URL) to address the
|
||||
problem of emitting the first symbol at the very beginning. If you need a
|
||||
model without this issue, please download the model from here: <https://huggingface.co/marcoyang/icefall-asr-librispeech-pruned-transducer-stateless7-2023-03-10>
|
||||
|
||||
### LibriSpeech BPE training results (Pruned Stateless LSTM RNN-T + gradient filter)
|
||||
|
||||
|
@ -58,7 +58,6 @@ class Decoder(nn.Module):
|
||||
self.embedding = nn.Embedding(
|
||||
num_embeddings=vocab_size,
|
||||
embedding_dim=embedding_dim,
|
||||
padding_idx=blank_id,
|
||||
)
|
||||
self.blank_id = blank_id
|
||||
self.unk_id = unk_id
|
||||
|
@ -59,7 +59,6 @@ class Decoder(nn.Module):
|
||||
self.embedding = ScaledEmbedding(
|
||||
num_embeddings=vocab_size,
|
||||
embedding_dim=decoder_dim,
|
||||
padding_idx=blank_id,
|
||||
)
|
||||
self.blank_id = blank_id
|
||||
|
||||
|
@ -56,7 +56,6 @@ class Decoder(nn.Module):
|
||||
self.embedding = nn.Embedding(
|
||||
num_embeddings=vocab_size,
|
||||
embedding_dim=decoder_dim,
|
||||
padding_idx=blank_id,
|
||||
)
|
||||
self.blank_id = blank_id
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user