Mirror of https://github.com/k2-fsa/icefall.git, synced 2025-08-08 09:32:20 +00:00
minor fix for docstr and default param. (#1490)
* Update train.py and README.md
This commit is contained in:
parent b9e6327adf
commit a813186f64
@@ -74,6 +74,9 @@ The [LibriSpeech][librispeech] recipe supports the most comprehensive set of models
   - LSTM-based Predictor
   - [Stateless Predictor](https://research.google/pubs/rnn-transducer-with-stateless-prediction-network/)
 
+#### Whisper
+
+- [OpenAi Whisper](https://arxiv.org/abs/2212.04356) (We support fine-tuning on AiShell-1.)
 
 If you are willing to contribute to icefall, please refer to [contributing](https://icefall.readthedocs.io/en/latest/contributing/index.html) for more details.
 
 We would like to highlight the performance of some of the recipes here.
@@ -19,7 +19,7 @@
 Usage:
 
 #fine-tuning with deepspeed zero stage 1
-torchrun --nproc-per-node 8 ./whisper/train.py \
+torchrun --nproc_per_node 8 ./whisper/train.py \
   --max-duration 200 \
   --exp-dir whisper/exp_large_v2 \
   --model-name large-v2 \
@@ -28,7 +28,7 @@ torchrun --nproc-per-node 8 ./whisper/train.py \
   --deepspeed_config ./whisper/ds_config_zero1.json
 
 # fine-tuning with ddp
-torchrun --nproc-per-node 8 ./whisper/train.py \
+torchrun --nproc_per_node 8 ./whisper/train.py \
   --max-duration 200 \
   --exp-dir whisper/exp_medium \
   --manifest-dir data/fbank_whisper \
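In both launch modes torchrun spawns one training process per GPU (8 here); the docstring fix switches to the underscore spelling `--nproc_per_node`, presumably so the command also works with torchrun releases that do not accept the hyphenated alias. As a rough, hedged sketch (not icefall's actual setup code), a script launched this way typically reads the rank information that torchrun exports via environment variables before initializing DDP:

```python
# Sketch only: illustrates what a torchrun-launched process sees, not the
# real whisper/train.py logic. torchrun sets RANK, LOCAL_RANK and WORLD_SIZE
# for every process it spawns.
import os

import torch
import torch.distributed as dist


def setup_distributed() -> tuple:
    rank = int(os.environ.get("RANK", 0))
    local_rank = int(os.environ.get("LOCAL_RANK", 0))
    world_size = int(os.environ.get("WORLD_SIZE", 1))
    if world_size > 1:
        # NCCL is the usual backend for multi-GPU training.
        dist.init_process_group(backend="nccl", rank=rank, world_size=world_size)
        torch.cuda.set_device(local_rank)
    return rank, local_rank, world_size


if __name__ == "__main__":
    rank, local_rank, world_size = setup_distributed()
    print(f"rank={rank} local_rank={local_rank} world_size={world_size}")
```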
@@ -136,7 +136,7 @@ def get_parser():
     parser.add_argument(
         "--exp-dir",
         type=str,
-        default="pruned_transducer_stateless7/exp",
+        default="whisper/exp",
         help="""The experiment dir.
         It specifies the directory where all training related
         files, e.g., checkpoints, log, etc, are saved
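The default-param fix replaces a leftover pruned_transducer_stateless7/exp path with whisper/exp, so checkpoints and logs land under the Whisper experiment directory when `--exp-dir` is not given. A minimal, self-contained sketch of the corrected argument (the real get_parser() defines many more options) behaves like this:

```python
# Minimal sketch of the corrected --exp-dir default; not the full icefall parser.
import argparse


def get_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Whisper fine-tuning (sketch)")
    parser.add_argument(
        "--exp-dir",
        type=str,
        default="whisper/exp",  # previously "pruned_transducer_stateless7/exp"
        help="Directory where training artifacts (checkpoints, logs) are saved.",
    )
    return parser


if __name__ == "__main__":
    args = get_parser().parse_args()
    print(args.exp_dir)  # prints "whisper/exp" when no --exp-dir flag is passed
```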