Mirror of https://github.com/k2-fsa/icefall.git (synced 2025-09-06 15:44:17 +00:00)

Commit b5dfe244be (parent 9c29aaa61c): leave the old code in comments for reference
@@ -954,6 +954,15 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts
+        # strictly speaking, shuffled training cuts should be used instead
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
+
+        # train_cuts = librispeech.train_clean_100_cuts()
+        # train_cuts += librispeech.train_clean_360_cuts()
+        # train_cuts += librispeech.train_other_500_cuts()
     else:
         train_cuts = librispeech.train_clean_100_cuts()

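The commented-out option above concatenates the three LibriSpeech training subsets lazily, so an epoch visits all of train-clean-100 before any of train-clean-360 or train-other-500; that is why the comments point to the pre-shuffled cuts returned by train_all_shuf_cuts() instead. A minimal sketch of the difference, assuming lhotse-style CutSet manifests (the file paths below are illustrative, not taken from the recipe):

# Sketch only: why concatenation order matters for training data.
from lhotse import load_manifest_lazy

clean_100 = load_manifest_lazy("data/fbank/librispeech_cuts_train-clean-100.jsonl.gz")
clean_360 = load_manifest_lazy("data/fbank/librispeech_cuts_train-clean-360.jsonl.gz")
other_500 = load_manifest_lazy("data/fbank/librispeech_cuts_train-other-500.jsonl.gz")

# Lazy concatenation (what the commented-out code does): iteration yields
# every clean-100 cut before any clean-360/other-500 cut, so a streaming
# sampler that shuffles with a finite buffer cannot mix the subsets well.
train_cuts_concat = clean_100 + clean_360 + other_500

# Preferred in the recipe: a manifest that was combined and globally
# shuffled ahead of time, i.e. what librispeech.train_all_shuf_cuts() loads.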
@@ -774,6 +774,15 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts,
+        # strictly speaking, shuffled training cuts should be used instead,
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
+
+        # train_cuts = librispeech.train_clean_100_cuts()
+        # train_cuts += librispeech.train_clean_360_cuts()
+        # train_cuts += librispeech.train_other_500_cuts()
     else:
         train_cuts = librispeech.train_clean_100_cuts()

@@ -991,6 +991,15 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts,
+        # strictly speaking, shuffled training cuts should be used instead,
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
+
+        # train_cuts = librispeech.train_clean_100_cuts()
+        # train_cuts += librispeech.train_clean_360_cuts()
+        # train_cuts += librispeech.train_other_500_cuts()
     else:
         train_cuts = librispeech.train_clean_100_cuts()

@@ -819,6 +819,15 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts,
+        # strictly speaking, shuffled training cuts should be used instead,
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
+
+        # train_cuts = librispeech.train_clean_100_cuts()
+        # train_cuts += librispeech.train_clean_360_cuts()
+        # train_cuts += librispeech.train_other_500_cuts()
     else:
         train_cuts = librispeech.train_clean_100_cuts()

@@ -1047,6 +1047,15 @@ def run(rank, world_size, args):
     else:
         if params.full_libri:
             train_cuts = librispeech.train_all_shuf_cuts()
+
+            # previously we used the following code to load all training cuts,
+            # strictly speaking, shuffled training cuts should be used instead,
+            # but we leave the code here to demonstrate that there is an option
+            # like this to combine multiple cutsets
+
+            # train_cuts = librispeech.train_clean_100_cuts()
+            # train_cuts += librispeech.train_clean_360_cuts()
+            # train_cuts += librispeech.train_other_500_cuts()
         else:
             train_cuts = librispeech.train_clean_100_cuts()

@@ -1152,6 +1152,11 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts,
+        # strictly speaking, shuffled training cuts should be used instead,
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
     else:
         train_cuts = librispeech.train_clean_100_cuts()

@@ -1176,6 +1176,15 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts,
+        # strictly speaking, shuffled training cuts should be used instead,
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
+
+        # train_cuts = librispeech.train_clean_100_cuts()
+        # train_cuts += librispeech.train_clean_360_cuts()
+        # train_cuts += librispeech.train_other_500_cuts()
     else:
         train_cuts = librispeech.train_clean_100_cuts()

@@ -992,6 +992,11 @@ def run(rank, world_size, args):

     if params.full_libri:
         train_cuts = librispeech.train_all_shuf_cuts()
+
+        # previously we used the following code to load all training cuts,
+        # strictly speaking, shuffled training cuts should be used instead,
+        # but we leave the code here to demonstrate that there is an option
+        # like this to combine multiple cutsets
     else:
         train_cuts = librispeech.train_clean_100_cuts()

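For completeness, the pre-shuffled cuts that train_all_shuf_cuts() loads are prepared ahead of training rather than at run time. A hypothetical sketch of how such a combined, globally shuffled manifest could be written with lhotse, assuming the per-subset manifests already exist (file names are illustrative and not taken from the recipe's preparation scripts):

# Sketch only: build a combined, pre-shuffled training manifest offline.
from lhotse import load_manifest_lazy

subsets = [
    "data/fbank/librispeech_cuts_train-clean-100.jsonl.gz",
    "data/fbank/librispeech_cuts_train-clean-360.jsonl.gz",
    "data/fbank/librispeech_cuts_train-other-500.jsonl.gz",
]

cuts = [load_manifest_lazy(p) for p in subsets]
combined = cuts[0] + cuts[1] + cuts[2]

# Materialize and shuffle once, then write a single manifest that the
# training script can stream lazily epoch after epoch.
shuffled = combined.to_eager().shuffle()
shuffled.to_file("data/fbank/librispeech_cuts_train-all-shuf.jsonl.gz")

Shuffling once, eagerly, keeps the training loop free to read the combined manifest lazily while still seeing the three subsets interleaved.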