Mirror of https://github.com/k2-fsa/icefall.git
Set fsa.properties to None after changing its labels in-place. (#121)
parent b9452235d5
commit 68506609ad
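Every hunk below applies the same two-line idea: after overwriting some of an FSA's labels in place, clear the cached property bitmask so that k2 recomputes it instead of trusting stale flags (see the k2 issue #874 referenced in the added comments). A minimal sketch of that pattern, assuming a working k2 installation; the toy FSA, its scores, and the threshold of 5 are made up for illustration, only the masked label write and the `_properties` reset come from the commit:

import k2

# A tiny acceptor in k2.Fsa.from_str format: "src dst label score" per arc,
# with the final state on the last line.  Labels >= 5 stand in for
# disambiguation symbols here.
s = """
0 1 10 0.5
0 2 3 0.25
1 3 -1 0.0
2 3 -1 0.0
3
"""
G = k2.Fsa.from_str(s.strip())
print(G.properties)  # accessing .properties computes and caches the flags

# Overwrite the labels in place, mirroring the masked write in the recipes.
G.labels[G.labels >= 5] = 0

# The cached flags no longer describe the arcs, so drop them and let k2
# recompute on the next access (this is the line the commit adds).
G.__dict__["_properties"] = None

G = k2.arc_sort(G)
print(G.properties)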
@@ -607,6 +607,9 @@ def main():
     # Arcs entering the back-off state have label equal to #0.
     # We have to change it to 0 here.
     G.labels[G.labels >= first_word_disambig_id] = 0
+    # See https://github.com/k2-fsa/k2/issues/874
+    # for why we need to set G.properties to None
+    G.__dict__["_properties"] = None
     G = k2.Fsa.from_fsas([G]).to(device)
     G = k2.arc_sort(G)
     # Save a dummy value so that it can be loaded in C++.
@@ -603,6 +603,9 @@ def main():
     # Arcs entering the back-off state have label equal to #0.
     # We have to change it to 0 here.
     G.labels[G.labels >= first_word_disambig_id] = 0
+    # See https://github.com/k2-fsa/k2/issues/874
+    # for why we need to set G.properties to None
+    G.__dict__["_properties"] = None
     G = k2.Fsa.from_fsas([G]).to(device)
     G = k2.arc_sort(G)
     torch.save(G.as_dict(), params.lm_dir / "G_4_gram.pt")
@@ -101,6 +101,9 @@ def compile_HLG(lang_dir: str) -> k2.Fsa:
     logging.info("Removing disambiguation symbols on LG")

     LG.labels[LG.labels >= first_token_disambig_id] = 0
+    # See https://github.com/k2-fsa/k2/issues/874
+    # for why we need to set LG.properties to None
+    LG.__dict__["_properties"] = None

     assert isinstance(LG.aux_labels, k2.RaggedTensor)
     LG.aux_labels.values[LG.aux_labels.values >= first_word_disambig_id] = 0
@@ -422,6 +422,9 @@ def main():
     # Arcs entering the back-off state have label equal to #0.
     # We have to change it to 0 here.
     G.labels[G.labels >= first_word_disambig_id] = 0
+    # See https://github.com/k2-fsa/k2/issues/874
+    # for why we need to set G.properties to None
+    G.__dict__["_properties"] = None
     G = k2.Fsa.from_fsas([G]).to(device)
     G = k2.arc_sort(G)
     torch.save(G.as_dict(), params.lm_dir / "G_4_gram.pt")
@@ -79,6 +79,9 @@ def compile_HLG(lang_dir: str) -> k2.Fsa:
     logging.info("Removing disambiguation symbols on LG")

     LG.labels[LG.labels >= first_token_disambig_id] = 0
+    # See https://github.com/k2-fsa/k2/issues/874
+    # for why we need to set LG.properties to None
+    LG.__dict__["_properties"] = None

     assert isinstance(LG.aux_labels, k2.RaggedTensor)
     LG.aux_labels.values[LG.aux_labels.values >= first_word_disambig_id] = 0
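In the compile_HLG hunks the word-level disambiguation symbols live on LG.aux_labels, a k2.RaggedTensor, and its flat .values tensor is edited in place with the same masked-write idiom; the diff attaches the properties reset only to the labels change. A small sketch of that ragged edit, with made-up symbol ids and a hypothetical threshold:

import k2

# One (possibly empty) list of output symbols per arc; the ids are made up.
aux = k2.RaggedTensor([[7, 12], [], [9]])

# .values exposes the flat storage backing the ragged tensor, so a masked
# in-place write zeroes every id at or above the threshold, mirroring
# LG.aux_labels.values[LG.aux_labels.values >= first_word_disambig_id] = 0.
first_word_disambig_id = 10  # hypothetical value for illustration
aux.values[aux.values >= first_word_disambig_id] = 0
print(aux)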