# Mirror of https://github.com/k2-fsa/icefall.git
# Synced 2025-08-08 09:32:20 +00:00
# 152 lines | 4.6 KiB | Bash | Executable File
#!/usr/bin/env bash

# Data-preparation entry script for the baker-zh recipe.
# All generated artifacts go to ./data; downloads go to ./download.
# Usage: ./prepare.sh [--stage N] [--stop-stage M]

# fix segmentation fault reported in https://github.com/k2-fsa/icefall/issues/674
export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python

set -euo pipefail

# Only stages in the inclusive range [stage, stop_stage] are run;
# both can be overridden on the command line via parse_options.sh.
stage=-1
stop_stage=100

# Directory that holds (or will receive) the downloaded corpus.
dl_dir=$PWD/download

mkdir -p "$dl_dir"

. shared/parse_options.sh || exit 1

# All files generated by this script are saved in "data".
# You can safely remove "data" and rerun this script to regenerate it.
mkdir -p data
# Print a timestamped log line, prefixed with the calling script's
# basename, line number and function name (via bash introspection).
# This function is from espnet.
log() {
  # Separate declaration from assignment so a failed substitution
  # would not be masked by `local` (SC2155).
  local fname
  fname=${BASH_SOURCE[1]##*/}
  echo -e "$(date '+%Y-%m-%d %H:%M:%S') (${fname}:${BASH_LINENO[0]}:${FUNCNAME[1]}) $*"
}
log "dl_dir: $dl_dir"

if [ $stage -le -1 ] && [ $stop_stage -ge -1 ]; then
  log "Stage -1: build monotonic_align lib (used by ./matcha)"
  for recipe in matcha; do
    if [ ! -d "$recipe/monotonic_align/build" ]; then
      # Build in a subshell so the main script's working directory is
      # never changed (the old `cd ../../` broke if the build aborted).
      (
        cd "$recipe/monotonic_align"
        python3 setup.py build_ext --inplace
      )
    else
      log "monotonic_align lib for $recipe already built"
    fi
  done
fi
if [ $stage -le 0 ] && [ $stop_stage -ge 0 ]; then
  log "Stage 0: Download data"

  # The directory $dl_dir/BZNSYP contains the following 3 directories
  #
  # ls -lh $dl_dir/BZNSYP/
  # total 0
  # drwxr-xr-x 10002 kuangfangjun root 0 Jan  4  2019 PhoneLabeling
  # drwxr-xr-x     3 kuangfangjun root 0 Jan 31  2019 ProsodyLabeling
  # drwxr-xr-x 10003 kuangfangjun root 0 Aug 26 17:45 Wave

  # If you have trouble accessing huggingface.co, please use
  #
  # cd $dl_dir
  # wget https://huggingface.co/openspeech/BZNSYP/resolve/main/BZNSYP.tar.bz2
  # tar xf BZNSYP.tar.bz2
  # cd ..

  # If you have pre-downloaded it to /path/to/BZNSYP, you can create a symlink
  #
  # ln -sfv /path/to/BZNSYP $dl_dir/BZNSYP
  #
  if [ ! -d "$dl_dir/BZNSYP/Wave" ]; then
    lhotse download baker-zh "$dl_dir"
  fi
fi
if [ $stage -le 1 ] && [ $stop_stage -ge 1 ]; then
  log "Stage 1: Prepare baker-zh manifest"
  # We assume that you have downloaded the baker corpus
  # to $dl_dir/BZNSYP
  mkdir -p data/manifests
  # The hidden .done marker makes this stage idempotent across reruns.
  if [ ! -e data/manifests/.baker-zh.done ]; then
    lhotse prepare baker-zh "$dl_dir/BZNSYP" data/manifests
    touch data/manifests/.baker-zh.done
  fi
fi
if [ $stage -le 2 ] && [ $stop_stage -ge 2 ]; then
  log "Stage 2: Generate tokens.txt"
  if [ ! -e data/tokens.txt ]; then
    python3 ./local/generate_tokens.py --tokens data/tokens.txt
  fi
fi
if [ $stage -le 3 ] && [ $stop_stage -ge 3 ]; then
  log "Stage 3: Generate raw cutset"
  # Pair the recordings manifest with the supervisions manifest into
  # a single raw CutSet.
  if [ ! -e data/manifests/baker_zh_cuts_raw.jsonl.gz ]; then
    lhotse cut simple \
      -r ./data/manifests/baker_zh_recordings_all.jsonl.gz \
      -s ./data/manifests/baker_zh_supervisions_all.jsonl.gz \
      ./data/manifests/baker_zh_cuts_raw.jsonl.gz
  fi
fi
if [ $stage -le 4 ] && [ $stop_stage -ge 4 ]; then
  log "Stage 4: Convert text to tokens"
  if [ ! -e data/manifests/baker_zh_cuts.jsonl.gz ]; then
    python3 ./local/convert_text_to_tokens.py \
      --in-file ./data/manifests/baker_zh_cuts_raw.jsonl.gz \
      --out-file ./data/manifests/baker_zh_cuts.jsonl.gz
  fi
fi
if [ $stage -le 5 ] && [ $stop_stage -ge 5 ]; then
  log "Stage 5: Generate fbank (used by ./matcha)"
  mkdir -p data/fbank
  if [ ! -e data/fbank/.baker-zh.done ]; then
    ./local/compute_fbank_baker_zh.py
    touch data/fbank/.baker-zh.done
  fi

  # Sanity-check the generated cuts before any training uses them.
  if [ ! -e data/fbank/.baker-zh-validated.done ]; then
    log "Validating data/fbank for baker-zh (used by ./matcha)"
    python3 ./local/validate_manifest.py \
      data/fbank/baker_zh_cuts.jsonl.gz
    touch data/fbank/.baker-zh-validated.done
  fi
fi
if [ $stage -le 6 ] && [ $stop_stage -ge 6 ]; then
  log "Stage 6: Split the baker-zh cuts into train, valid and test sets (used by ./matcha)"
  if [ ! -e data/fbank/.baker_zh_split.done ]; then
    # Hold out the last 600 cuts, then carve them into
    # valid (first 100) and test (last 500).
    lhotse subset --last 600 \
      data/fbank/baker_zh_cuts.jsonl.gz \
      data/fbank/baker_zh_cuts_validtest.jsonl.gz
    lhotse subset --first 100 \
      data/fbank/baker_zh_cuts_validtest.jsonl.gz \
      data/fbank/baker_zh_cuts_valid.jsonl.gz
    lhotse subset --last 500 \
      data/fbank/baker_zh_cuts_validtest.jsonl.gz \
      data/fbank/baker_zh_cuts_test.jsonl.gz

    rm data/fbank/baker_zh_cuts_validtest.jsonl.gz

    # Everything except the 600 held-out cuts becomes the train set.
    n=$(( $(gunzip -c data/fbank/baker_zh_cuts.jsonl.gz | wc -l) - 600 ))

    lhotse subset --first "$n" \
      data/fbank/baker_zh_cuts.jsonl.gz \
      data/fbank/baker_zh_cuts_train.jsonl.gz

    touch data/fbank/.baker_zh_split.done
  fi
fi
if [ $stage -le 7 ] && [ $stop_stage -ge 7 ]; then
  # Fixed: this log line previously said "Stage 6" by mistake.
  log "Stage 7: Compute fbank mean and std (used by ./matcha)"
  if [ ! -f ./data/fbank/cmvn.json ]; then
    ./local/compute_fbank_statistics.py ./data/fbank/baker_zh_cuts_train.jsonl.gz ./data/fbank/cmvn.json
  fi
fi