fix executor

Yuekai Zhang 2024-01-23 17:40:15 +08:00
parent e46e9b77ee
commit f66b266aa4


@@ -33,7 +33,7 @@ from lhotse import (
     set_caching_enabled,
 )
-from icefall.utils import str2bool
+from icefall.utils import str2bool, get_executor
 
 # Torch's multithreaded behavior needs to be disabled or
 # it wastes a lot of CPU and slow things down.
 # Do this outside of main() in case it needs to take effect
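The newly imported get_executor is a context manager in icefall.utils that yields a worker pool for parallel feature extraction. As a rough sketch of the pattern (a simplified stand-in, not icefall's actual implementation; the num_workers argument here is hypothetical):

    from concurrent.futures import ProcessPoolExecutor
    from contextlib import contextmanager

    @contextmanager
    def get_executor(num_workers: int = 8):
        # Simplified stand-in for icefall.utils.get_executor: yield a
        # process pool, then shut it down when the with-block exits.
        # The real helper may pick a different backend or pool size.
        pool = ProcessPoolExecutor(max_workers=num_workers)
        try:
            yield pool
        finally:
            pool.shutdown(wait=True)

Entering the with-block once, outside the split loop, creates the pool a single time and reuses it for every split, which is exactly what the change below does.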
@@ -137,43 +137,44 @@ def compute_fbank_wenetspeech_splits(args):
     set_audio_duration_mismatch_tolerance(0.01)  # 10ms tolerance
     set_caching_enabled(False)
-    for i in range(start, stop):
-        idx = f"{i + 1}".zfill(num_digits)
-        logging.info(f"Processing {idx}/{num_splits}")
+    with get_executor() as ex:  # Initialize the executor only once.
+        for i in range(start, stop):
+            idx = f"{i + 1}".zfill(num_digits)
+            logging.info(f"Processing {idx}/{num_splits}")
 
-        cuts_path = output_dir / f"cuts_{subset}.{idx}.jsonl.gz"
-        if cuts_path.is_file():
-            logging.info(f"{cuts_path} exists - skipping")
-            continue
+            cuts_path = output_dir / f"cuts_{subset}.{idx}.jsonl.gz"
+            if cuts_path.is_file():
+                logging.info(f"{cuts_path} exists - skipping")
+                continue
 
-        raw_cuts_path = output_dir / f"cuts_{subset}_raw.{idx}.jsonl.gz"
+            raw_cuts_path = output_dir / f"cuts_{subset}_raw.{idx}.jsonl.gz"
 
-        logging.info(f"Loading {raw_cuts_path}")
-        cut_set = CutSet.from_file(raw_cuts_path)
+            logging.info(f"Loading {raw_cuts_path}")
+            cut_set = CutSet.from_file(raw_cuts_path)
 
-        logging.info("Splitting cuts into smaller chunks.")
-        cut_set = cut_set.trim_to_supervisions(
-            keep_overlapping=False, min_duration=None
-        )
+            logging.info("Splitting cuts into smaller chunks.")
+            cut_set = cut_set.trim_to_supervisions(
+                keep_overlapping=False, min_duration=None
+            )
 
-        logging.info("Computing features")
-        # cut_set = cut_set.compute_and_store_features_batch(
-        #     extractor=extractor,
-        #     storage_path=f"{output_dir}/feats_{subset}_{idx}",
-        #     num_workers=args.num_workers,
-        #     batch_duration=args.batch_duration,
-        #     storage_type=LilcomChunkyWriter,
-        #     overwrite=True,
-        # )
-        cut_set = cut_set.compute_and_store_features(
-            extractor=extractor,
-            storage_path=f"{output_dir}/feats_{subset}_{idx}",
-            num_jobs=args.num_workers,
-            executor=extractor,
-            storage_type=LilcomChunkyWriter,
-        )
-        logging.info(f"Saving to {cuts_path}")
-        cut_set.to_file(cuts_path)
+            logging.info("Computing features")
+            # cut_set = cut_set.compute_and_store_features_batch(
+            #     extractor=extractor,
+            #     storage_path=f"{output_dir}/feats_{subset}_{idx}",
+            #     num_workers=args.num_workers,
+            #     batch_duration=args.batch_duration,
+            #     storage_type=LilcomChunkyWriter,
+            #     overwrite=True,
+            # )
+            cut_set = cut_set.compute_and_store_features(
+                extractor=extractor,
+                storage_path=f"{output_dir}/feats_{subset}_{idx}",
+                num_jobs=args.num_workers,
+                executor=ex,
+                storage_type=LilcomChunkyWriter,
+            )
+            logging.info(f"Saving to {cuts_path}")
+            cut_set.to_file(cuts_path)
 
 
 def main():
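The substantive fix is the executor keyword of compute_and_store_features: the old code passed executor=extractor, handing lhotse the feature extractor object where it expects a concurrent.futures-style executor. The new code opens one executor via get_executor() before the loop and passes it as executor=ex. A condensed sketch of the corrected call for a single split, assuming a plain lhotse Fbank extractor and illustrative paths (the recipe's actual extractor and paths may differ):

    from lhotse import CutSet, Fbank, LilcomChunkyWriter
    from icefall.utils import get_executor

    extractor = Fbank()  # illustrative; the recipe configures its own extractor
    cut_set = CutSet.from_file("cuts_L_raw.0001.jsonl.gz")  # hypothetical path

    with get_executor() as ex:  # one pool, reused across all splits
        cut_set = cut_set.compute_and_store_features(
            extractor=extractor,
            storage_path="feats_L_0001",  # hypothetical path
            num_jobs=8,
            executor=ex,  # the executor, not the extractor
            storage_type=LilcomChunkyWriter,
        )

The compute_and_store_features_batch variant left commented out in the diff takes num_workers and batch_duration instead; it parallelizes internally rather than through a user-supplied executor, which is why it needs nothing from get_executor.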