example usage

Guo Liyong 2022-05-27 10:28:34 +08:00
parent 14a4d1d6f2
commit d9a6aff863


@@ -1,4 +1,15 @@
# This is an example of doing distillation with the LibriSpeech clean-100 subset.
# Run it with:
#   bash distillation_with_hubert.sh [0|1|2|3|4]
#
# For example, the command
#   bash distillation_with_hubert.sh 0
# will download the hubert model.
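#
# To run all stages in order, one simple option (just a sketch) is:
#   for s in 0 1 2 3 4; do bash distillation_with_hubert.sh $s; done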
stage=$1
# Set which GPUs are available.
# This script requires at least one GPU.
# Example:
export CUDA_VISIBLE_DEVICES="2,3,4,5"
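# For example, on a single-GPU machine you could instead set:
#   export CUDA_VISIBLE_DEVICES="0"
# (nvidia-smi -L lists the GPUs visible on this machine.)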
@@ -23,13 +34,17 @@ if [ $stage -eq 0 ]; then
  hubert_model=${hubert_model_dir}/${model_id}.pt
  mkdir -p ${hubert_model_dir}
  # For more models refer to: https://github.com/pytorch/fairseq/tree/main/examples/hubert
  if [ -f ${hubert_model} ]; then
    echo "hubert model already exists."
  else
    # -P expects a directory prefix; the checkpoint is saved as ${model_id}.pt
    # so that the existence check above finds it on the next run.
    wget -c https://dl.fbaipublicfiles.com/hubert/${model_id}.pt -P ${hubert_model_dir}
    wget -c https://dl.fbaipublicfiles.com/fairseq/wav2vec/dict.ltr.txt -P ${hubert_model_dir}
  fi
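  # Optional sanity check (just a sketch): verify that the checkpoint is
  # present and non-empty before continuing, e.g.
  #   [ -s ${hubert_model} ] || { echo "hubert checkpoint missing"; exit 1; }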
fi
if [ ! -d ./data/fbank ]; then
echo "This script assumes ./data/fbank is already generated by prepare.sh"
exit 0
exit 1
fi
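# If ./data/fbank has not been generated yet, run the recipe's data
# preparation script first, e.g.:
#   ./prepare.sh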
if [ $stage -eq 1 ]; then