commit 67dbb25620467ce9341e2f4c2f646f0b6d4b36b2 Author: csukuangfj Date: Mon Jul 25 04:08:43 2022 +0000 deploy: d99796898cc369123dfdea8a0f660fe174a33c35 diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 000000000..46f8d58fb --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: cfc3e6ecc44ed7573f700065af8738a7 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 000000000..e69de29bb diff --git a/_images/aishell-conformer-ctc-tensorboard-log.jpg b/_images/aishell-conformer-ctc-tensorboard-log.jpg new file mode 100644 index 000000000..c8b26f741 Binary files /dev/null and b/_images/aishell-conformer-ctc-tensorboard-log.jpg differ diff --git a/_images/aishell-tdnn-lstm-ctc-tensorboard-log.jpg b/_images/aishell-tdnn-lstm-ctc-tensorboard-log.jpg new file mode 100644 index 000000000..b31db3ab5 Binary files /dev/null and b/_images/aishell-tdnn-lstm-ctc-tensorboard-log.jpg differ diff --git a/_images/aishell-transducer_stateless_modified-tensorboard-log.png b/_images/aishell-transducer_stateless_modified-tensorboard-log.png new file mode 100644 index 000000000..6c84b28f2 Binary files /dev/null and b/_images/aishell-transducer_stateless_modified-tensorboard-log.png differ diff --git a/_images/device-CPU_CUDA-orange.svg b/_images/device-CPU_CUDA-orange.svg new file mode 100644 index 000000000..a023a1283 --- /dev/null +++ b/_images/device-CPU_CUDA-orange.svg @@ -0,0 +1 @@ +device: CPU | CUDAdeviceCPU | CUDA diff --git a/_images/doc-contrib.png b/_images/doc-contrib.png new file mode 100644 index 000000000..00906ab83 Binary files /dev/null and b/_images/doc-contrib.png differ diff --git a/_images/hugging-face-sherpa-2.png b/_images/hugging-face-sherpa-2.png new file mode 100644 index 000000000..3b47bd51b Binary files /dev/null and b/_images/hugging-face-sherpa-2.png differ diff --git a/_images/hugging-face-sherpa-3.png b/_images/hugging-face-sherpa-3.png new file mode 100644 index 000000000..1d7a2d316 Binary files /dev/null and b/_images/hugging-face-sherpa-3.png differ diff --git a/_images/hugging-face-sherpa.png b/_images/hugging-face-sherpa.png new file mode 100644 index 000000000..dea0b1d46 Binary files /dev/null and b/_images/hugging-face-sherpa.png differ diff --git a/_images/k2-gt-v1.9-blueviolet.svg b/_images/k2-gt-v1.9-blueviolet.svg new file mode 100644 index 000000000..534b2e534 --- /dev/null +++ b/_images/k2-gt-v1.9-blueviolet.svg @@ -0,0 +1 @@ +k2: >= v1.9k2>= v1.9 \ No newline at end of file diff --git a/_images/librispeech-conformer-ctc-tensorboard-log.png b/_images/librispeech-conformer-ctc-tensorboard-log.png new file mode 100644 index 000000000..4e8c2ea7c Binary files /dev/null and b/_images/librispeech-conformer-ctc-tensorboard-log.png differ diff --git a/_images/logo.png b/_images/logo.png new file mode 100644 index 000000000..84d42568c Binary files /dev/null and b/_images/logo.png differ diff --git a/_images/os-Linux_macOS-ff69b4.svg b/_images/os-Linux_macOS-ff69b4.svg new file mode 100644 index 000000000..178813ed4 --- /dev/null +++ b/_images/os-Linux_macOS-ff69b4.svg @@ -0,0 +1 @@ +os: Linux | macOSosLinux | macOS diff --git a/_images/pre-commit-check-success.png b/_images/pre-commit-check-success.png new file mode 100644 index 000000000..3c6ee9b1c Binary files /dev/null and b/_images/pre-commit-check-success.png differ diff --git a/_images/pre-commit-check.png 
b/_images/pre-commit-check.png
new file mode 100644
index 000000000..80784eced
Binary files /dev/null and b/_images/pre-commit-check.png differ
diff --git a/_images/python-gt-v3.6-blue.svg b/_images/python-gt-v3.6-blue.svg
new file mode 100644
index 000000000..4254dc58a
--- /dev/null
+++ b/_images/python-gt-v3.6-blue.svg
@@ -0,0 +1 @@
+python: >= 3.6python>= 3.6
\ No newline at end of file
diff --git a/_images/tdnn-tensorboard-log.png b/_images/tdnn-tensorboard-log.png
new file mode 100644
index 000000000..3d2612c9c
Binary files /dev/null and b/_images/tdnn-tensorboard-log.png differ
diff --git a/_images/torch-gt-v1.6.0-green.svg b/_images/torch-gt-v1.6.0-green.svg
new file mode 100644
index 000000000..d3ece9a17
--- /dev/null
+++ b/_images/torch-gt-v1.6.0-green.svg
@@ -0,0 +1 @@
+torch: >= 1.6.0torch>= 1.6.0
\ No newline at end of file
diff --git a/_sources/contributing/code-style.rst.txt b/_sources/contributing/code-style.rst.txt
new file mode 100644
index 000000000..7d61a3ba1
--- /dev/null
+++ b/_sources/contributing/code-style.rst.txt
@@ -0,0 +1,67 @@
+.. _follow the code style:
+
+Follow the code style
+=====================
+
+We use the following tools to keep the code style as consistent as possible:
+
+  - `black `_, to format the code
+  - `flake8 `_, to check the style and quality of the code
+  - `isort `_, to sort ``imports``
+
+The following versions of the above tools are used:
+
+  - ``black == 21.6b0``
+  - ``flake8 == 3.9.2``
+  - ``isort == 5.9.2``
+
+After you run the following commands:
+
+  .. code-block::
+
+    $ git clone https://github.com/k2-fsa/icefall
+    $ cd icefall
+    $ pip install pre-commit
+    $ pre-commit install
+
+the following checks will run **automatically** whenever you run ``git commit``:
+
+  .. figure:: images/pre-commit-check.png
+     :width: 600
+     :align: center
+
+     pre-commit hooks invoked by ``git commit`` (Failed).
+
+If any of the above checks fails, your ``git commit`` will not succeed.
+Please fix any issues reported by the check tools.
+
+.. HINT::
+
+  Some of the check tools, i.e., ``black`` and ``isort``, will modify
+  the files to be committed **in-place**. So please run ``git status``
+  after a failure to see which files have been modified by the tools
+  before you make any further changes.
+
+After fixing all the failures, run ``git commit`` again and
+it should succeed this time:
+
+  .. figure:: images/pre-commit-check-success.png
+     :width: 600
+     :align: center
+
+     pre-commit hooks invoked by ``git commit`` (Succeeded).
+
+If you want to check the style of your code before ``git commit``, you
+can do the following:
+
+  .. code-block:: bash
+
+    $ cd icefall
+    $ pip install black==21.6b0 flake8==3.9.2 isort==5.9.2
+    $ black --check your_changed_file.py
+    $ black your_changed_file.py  # modify it in-place
+    $
+    $ flake8 your_changed_file.py
+    $
+    $ isort --check your_changed_file.py
+    $ isort your_changed_file.py  # modify it in-place
diff --git a/_sources/contributing/doc.rst.txt b/_sources/contributing/doc.rst.txt
new file mode 100644
index 000000000..893d8a15e
--- /dev/null
+++ b/_sources/contributing/doc.rst.txt
@@ -0,0 +1,45 @@
+Contributing to Documentation
+=============================
+
+We use `sphinx `_
+for documentation.
+
+Before writing documentation, you have to prepare the environment:
+
+  .. code-block:: bash
+
+    $ cd docs
+    $ pip install -r requirements.txt
+
+After setting up the environment, you are ready to write documentation.
+Please refer to `reStructuredText Primer `_
+if you are not familiar with ``reStructuredText``.
+
+After writing some documentation, you can build the documentation **locally**
+to preview what it will look like once it is published:
+
+  .. code-block:: bash
+
+    $ cd docs
+    $ make html
+
+The generated documentation is in ``docs/build/html`` and can be viewed
+with the following commands:
+
+  .. code-block:: bash
+
+    $ cd docs/build/html
+    $ python3 -m http.server
+
+It will print::
+
+  Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
+
+Open your browser, go to ``_, and you will see
+the following:
+
+  .. figure:: images/doc-contrib.png
+     :width: 600
+     :align: center
+
+     View generated documentation locally with ``python3 -m http.server``.
diff --git a/_sources/contributing/how-to-create-a-recipe.rst.txt b/_sources/contributing/how-to-create-a-recipe.rst.txt
new file mode 100644
index 000000000..a30fb9056
--- /dev/null
+++ b/_sources/contributing/how-to-create-a-recipe.rst.txt
@@ -0,0 +1,156 @@
+How to create a recipe
+======================
+
+.. HINT::
+
+  Please read :ref:`follow the code style` to adjust your code style.
+
+.. CAUTION::
+
+  ``icefall`` is designed to be as Pythonic as possible. Please use
+  Python in your recipe if possible.
+
+Data Preparation
+----------------
+
+We recommend that you prepare your training/test/validation datasets
+with `lhotse `_.
+
+Please refer to ``_
+for how to create a recipe in ``lhotse``.
+
+.. HINT::
+
+  The ``yesno`` recipe in ``lhotse`` is a very good example.
+
+  Please refer to ``_,
+  which shows how to add a new recipe to ``lhotse``.
+
+Suppose you would like to add a recipe for a dataset named ``foo``.
+You can do the following:
+
+.. code-block::
+
+  $ cd egs
+  $ mkdir -p foo/ASR
+  $ cd foo/ASR
+  $ touch prepare.sh
+  $ chmod +x prepare.sh
+
+If your dataset is very simple, please follow
+`egs/yesno/ASR/prepare.sh `_
+to write your own ``prepare.sh``.
+Otherwise, please refer to
+`egs/librispeech/ASR/prepare.sh `_
+to prepare your data.
+
+
+Training
+--------
+
+Assume you have a fancy model called ``bar`` for the ``foo`` recipe; you can
+organize your files in the following way:
+
+.. code-block::
+
+  $ cd egs/foo/ASR
+  $ mkdir bar
+  $ cd bar
+  $ touch README.md model.py train.py decode.py asr_datamodule.py pretrained.py
+
+For instance, the ``yesno`` recipe has a ``tdnn`` model and its directory structure
+looks like the following:
+
+.. code-block:: bash
+
+  egs/yesno/ASR/tdnn/
+  |-- README.md
+  |-- asr_datamodule.py
+  |-- decode.py
+  |-- model.py
+  |-- pretrained.py
+  `-- train.py
+
+**File description**:
+
+  - ``README.md``
+
+    It contains information about this recipe, e.g., how to run it, what the WER is, etc.
+
+  - ``asr_datamodule.py``
+
+    It provides code to create PyTorch dataloaders for the train/test/validation datasets.
+
+  - ``decode.py``
+
+    It takes as inputs the checkpoints saved during the training stage to decode the test
+    dataset(s).
+
+  - ``model.py``
+
+    It contains the definition of your fancy neural network model.
+
+  - ``pretrained.py``
+
+    We can use this script to do inference with a pre-trained model.
+
+  - ``train.py``
+
+    It contains the training code.
+
+
+.. HINT::
+
+  Please take a look at
+
+    - `egs/yesno/tdnn `_
+    - `egs/librispeech/tdnn_lstm_ctc `_
+    - `egs/librispeech/conformer_ctc `_
+
+  to get a feel for what the resulting files look like.
+
+.. NOTE::
+
+  Every model in a recipe is kept as self-contained as possible.
+  We tolerate duplicate code among different recipes.
+
+
+The training stage should be invocable by:
+
+  .. code-block::
+
+    $ cd egs/foo/ASR
+    $ ./bar/train.py
+    $ ./bar/train.py --help
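+
+If it helps to see the shape of such a script, below is a minimal,
+**hypothetical** sketch of what ``bar/train.py`` could look like. It only
+illustrates the expected command-line behavior shown above; a real recipe
+defines many more options and contains the actual training loop.
+
+  .. code-block:: python
+
+    #!/usr/bin/env python3
+    # Hypothetical skeleton for egs/foo/ASR/bar/train.py.
+    # Only the command-line interface is sketched here.
+    import argparse
+
+
+    def get_parser() -> argparse.ArgumentParser:
+        parser = argparse.ArgumentParser(
+            description="Train the bar model for the foo recipe"
+        )
+        parser.add_argument(
+            "--num-epochs", type=int, default=15, help="Number of epochs to train"
+        )
+        parser.add_argument(
+            "--exp-dir", type=str, default="bar/exp",
+            help="Where to save checkpoints and logs",
+        )
+        return parser
+
+
+    def main() -> None:
+        args = get_parser().parse_args()
+        print(f"Training for {args.num_epochs} epochs; exp dir: {args.exp_dir}")
+        # Build dataloaders from asr_datamodule.py, create the model from
+        # model.py, and run the training loop here.
+
+
+    if __name__ == "__main__":
+        main()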
+
+
+Decoding
+--------
+
+Please refer to
+
+  - ``_
+
+    If your model is transformer/conformer based.
+
+  - ``_
+
+    If your model is TDNN/LSTM based, i.e., there is no attention decoder.
+
+  - ``_
+
+    If there is no LM rescoring.
+
+The decoding stage should be invocable by:
+
+  .. code-block::
+
+    $ cd egs/foo/ASR
+    $ ./bar/decode.py
+    $ ./bar/decode.py --help
+
+Pre-trained model
+-----------------
+
+Please demonstrate how to use your model for inference in ``egs/foo/ASR/bar/pretrained.py``.
+If possible, please consider creating a Colab notebook to show that.
diff --git a/_sources/contributing/index.rst.txt b/_sources/contributing/index.rst.txt
new file mode 100644
index 000000000..21c747d33
--- /dev/null
+++ b/_sources/contributing/index.rst.txt
@@ -0,0 +1,22 @@
+Contributing
+============
+
+Contributions to ``icefall`` are very welcome.
+There are many possible ways to make contributions, and
+two of them are:
+
+  - To write documentation
+  - To write code
+
+    - (1) To follow the code style in the repository
+    - (2) To write a new recipe
+
+On this page, we describe how to contribute documentation
+and code to ``icefall``.
+
+.. toctree::
+   :maxdepth: 2
+
+   doc
+   code-style
+   how-to-create-a-recipe
diff --git a/_sources/huggingface/index.rst.txt b/_sources/huggingface/index.rst.txt
new file mode 100644
index 000000000..bd731793b
--- /dev/null
+++ b/_sources/huggingface/index.rst.txt
@@ -0,0 +1,13 @@
+Huggingface
+===========
+
+This section describes how to find pre-trained models.
+It also demonstrates how to try them from within your browser
+without installing anything, by using
+`Huggingface spaces `_.
+
+.. toctree::
+   :maxdepth: 2
+
+   pretrained-models
+   spaces
diff --git a/_sources/huggingface/pretrained-models.rst.txt b/_sources/huggingface/pretrained-models.rst.txt
new file mode 100644
index 000000000..8ae22f76f
--- /dev/null
+++ b/_sources/huggingface/pretrained-models.rst.txt
@@ -0,0 +1,17 @@
+Pre-trained models
+==================
+
+We have uploaded pre-trained models for all recipes in ``icefall``
+to ``_.
+
+You can find them by visiting the following link:
+
+``_.
+
+You can also find links to pre-trained models for a specific recipe
+by looking at the corresponding ``RESULTS.md``. For instance:
+
+  - ``_
+  - ``_
+  - ``_
+  - ``_
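+
+If you prefer to download a model programmatically instead of through the
+browser, the sketch below shows one way to do it. It is **not** part of the
+icefall documentation: it assumes you have installed the ``huggingface_hub``
+package (``pip install huggingface_hub``), and it uses the
+``pkufool/icefall_asr_aishell_conformer_ctc`` repository mentioned later in
+these docs as an example.
+
+  .. code-block:: python
+
+    # Sketch: fetch a pre-trained icefall model from huggingface.
+    # Requires: pip install huggingface_hub
+    from huggingface_hub import snapshot_download
+
+    # Downloads the whole model repository and returns the local path.
+    local_dir = snapshot_download(
+        repo_id="pkufool/icefall_asr_aishell_conformer_ctc"
+    )
+    print(f"Model files are in: {local_dir}")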
diff --git a/_sources/huggingface/spaces.rst.txt b/_sources/huggingface/spaces.rst.txt
new file mode 100644
index 000000000..e718c3731
--- /dev/null
+++ b/_sources/huggingface/spaces.rst.txt
@@ -0,0 +1,65 @@
+Huggingface spaces
+==================
+
+We have integrated the server framework
+`sherpa `_
+with `Huggingface spaces `_
+so that you can try pre-trained models from within your browser
+without the need to download or install anything.
+
+All you need is a browser, which you can run on Windows, macOS, Linux, or even on your
+iPad and your phone.
+
+Start your browser and visit the following address:
+
+``_
+
+and you will see a page like the following screenshot:
+
+.. image:: ./pic/hugging-face-sherpa.png
+   :alt: screenshot of ``_
+   :target: https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition
+
+You can:
+
+  1. Select a language for recognition. Currently, we provide pre-trained models
+     from ``icefall`` for the following languages: ``Chinese``, ``English``, and
+     ``Chinese+English``.
+  2. After selecting the target language, you can select a pre-trained model
+     corresponding to the language.
+  3. Select the decoding method. Currently, it provides ``greedy search``
+     and ``modified_beam_search``.
+  4. If you selected ``modified_beam_search``, you can choose the number of
+     active paths during the search.
+  5. Either upload a file or record your speech for recognition.
+  6. Click the button ``Submit for recognition``.
+  7. Wait for a moment and you will get the recognition results.
+
+The following screenshot shows an example when selecting ``Chinese+English``:
+
+.. image:: ./pic/hugging-face-sherpa-3.png
+   :alt: screenshot of ``_
+   :target: https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition
+
+
+In the bottom part of the page, you can find a table of examples. You can click
+one of them and then click ``Submit for recognition``.
+
+.. image:: ./pic/hugging-face-sherpa-2.png
+   :alt: screenshot of ``_
+   :target: https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition
+
+YouTube Video
+-------------
+
+We provide the following YouTube video demonstrating how to use
+``_.
+
+.. note::
+
+  To get the latest news about `next-gen Kaldi `_, please subscribe to
+  the following YouTube channel by `Nadira Povey `_:
+
+  ``_
+
+.. youtube:: ElN3r9dkKE4
diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt
new file mode 100644
index 000000000..29491e3dc
--- /dev/null
+++ b/_sources/index.rst.txt
@@ -0,0 +1,26 @@
+.. icefall documentation master file, created by
+   sphinx-quickstart on Mon Aug 23 16:07:39 2021.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Icefall
+=======
+
+.. image:: _static/logo.png
+   :alt: icefall logo
+   :width: 168px
+   :align: center
+   :target: https://github.com/k2-fsa/icefall
+
+
+Documentation for `icefall `_, containing
+speech recognition recipes using `k2 `_.
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   installation/index
+   recipes/index
+   contributing/index
+   huggingface/index
diff --git a/_sources/installation/index.rst.txt b/_sources/installation/index.rst.txt
new file mode 100644
index 000000000..c4474c3d9
--- /dev/null
+++ b/_sources/installation/index.rst.txt
@@ -0,0 +1,492 @@
+.. _install icefall:
+
+Installation
+============
+
+- |os|
+- |device|
+- |python_versions|
+- |torch_versions|
+- |k2_versions|
+
+.. |os| image:: ./images/os-Linux_macOS-ff69b4.svg
+  :alt: Supported operating systems
+
+.. |device| image:: ./images/device-CPU_CUDA-orange.svg
+  :alt: Supported devices
+
+.. |python_versions| image:: ./images/python-gt-v3.6-blue.svg
+  :alt: Supported python versions
+
+.. |torch_versions| image:: ./images/torch-gt-v1.6.0-green.svg
+  :alt: Supported PyTorch versions
+
+.. |k2_versions| image:: ./images/k2-gt-v1.9-blueviolet.svg
+  :alt: Supported k2 versions
+
+``icefall`` depends on `k2 `_ and
+`lhotse `_.
+
+We recommend that you install the dependencies in the following order:
+
+- (0) Install PyTorch and torchaudio
+- (1) Install k2
+- (2) Install lhotse
+
+.. caution::
+
+  Installation order matters.
+
+(0) Install PyTorch and torchaudio
+----------------------------------
+
+Please refer to ``_ for how to install PyTorch
+and torchaudio.
+
+
+(1) Install k2
+--------------
+
+Please refer to ``_
+to install ``k2``.
+
+.. CAUTION::
+
+  You need to install ``k2`` with a version at least **v1.9**.
+
+.. HINT::
+
+  If you have already installed PyTorch and don't want to replace it,
+  please install a version of ``k2`` that is compiled against the version
+  of PyTorch you are using.
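+
+If you are unsure which PyTorch version is installed, the quick check below
+may help before picking a matching ``k2`` wheel. This snippet is only a
+sketch and is not part of the official installation steps:
+
+.. code-block:: python
+
+  # Sanity check: icefall's badges state torch >= 1.6.0 and k2 >= v1.9.
+  import torch
+  import torchaudio
+
+  print(torch.__version__)          # PyTorch version, e.g., 1.9.0
+  print(torchaudio.__version__)     # Should match the PyTorch release
+  print(torch.cuda.is_available())  # False is expected for CPU-only setups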
+
+(2) Install lhotse
+------------------
+
+Please refer to ``_
+to install ``lhotse``.
+
+
+.. hint::
+
+  We strongly recommend that you use::
+
+    pip install git+https://github.com/lhotse-speech/lhotse
+
+  to install the latest version of lhotse.
+
+
+(3) Download icefall
+--------------------
+
+``icefall`` is a collection of Python scripts; all you need to do is download it
+and set the environment variable ``PYTHONPATH`` to point to it.
+
+Assume you want to place ``icefall`` in the folder ``/tmp``. The
+following commands show you how to set up ``icefall``:
+
+
+.. code-block:: bash
+
+  cd /tmp
+  git clone https://github.com/k2-fsa/icefall
+  cd icefall
+  pip install -r requirements.txt
+  export PYTHONPATH=/tmp/icefall:$PYTHONPATH
+
+.. HINT::
+
+  You can put several versions of ``icefall`` in the same virtual environment.
+  To switch among different versions of ``icefall``, just set ``PYTHONPATH``
+  to point to the version you want.
+
+
+Installation example
+--------------------
+
+The following shows an example of setting up the environment.
+
+
+(1) Create a virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  $ virtualenv -p python3.8 test-icefall
+
+  created virtual environment CPython3.8.6.final.0-64 in 1540ms
+    creator CPython3Posix(dest=/ceph-fj/fangjun/test-icefall, clear=False, no_vcs_ignore=False, global=False)
+    seeder FromAppData(download=False, pip=bundle, setuptools=bundle, wheel=bundle, via=copy, app_data_dir=/root/fangjun/.local/share/v
+  irtualenv)
+      added seed packages: pip==21.1.3, setuptools==57.4.0, wheel==0.36.2
+    activators BashActivator,CShellActivator,FishActivator,PowerShellActivator,PythonActivator,XonshActivator
+
+
+(2) Activate your virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  $ source test-icefall/bin/activate
+
+(3) Install k2
+~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  $ pip install k2==1.4.dev20210822+cpu.torch1.9.0 -f https://k2-fsa.org/nightly/index.html
+
+  Looking in links: https://k2-fsa.org/nightly/index.html
+  Collecting k2==1.4.dev20210822+cpu.torch1.9.0
+    Downloading https://k2-fsa.org/nightly/whl/k2-1.4.dev20210822%2Bcpu.torch1.9.0-cp38-cp38-linux_x86_64.whl (1.6 MB)
+      |________________________________| 1.6 MB 185 kB/s
+  Collecting graphviz
+    Downloading graphviz-0.17-py3-none-any.whl (18 kB)
+  Collecting torch==1.9.0
+    Using cached torch-1.9.0-cp38-cp38-manylinux1_x86_64.whl (831.4 MB)
+  Collecting typing-extensions
+    Using cached typing_extensions-3.10.0.0-py3-none-any.whl (26 kB)
+  Installing collected packages: typing-extensions, torch, graphviz, k2
+  Successfully installed graphviz-0.17 k2-1.4.dev20210822+cpu.torch1.9.0 torch-1.9.0 typing-extensions-3.10.0.0
+
+.. WARNING::
+
+  We choose to install a CPU version of k2 for testing. You would probably want to install
+  a CUDA version of k2.
+
+
+(4) Install lhotse
+~~~~~~~~~~~~~~~~~~
+
+.. 
code-block:: + + $ pip install git+https://github.com/lhotse-speech/lhotse + + Collecting git+https://github.com/lhotse-speech/lhotse + Cloning https://github.com/lhotse-speech/lhotse to /tmp/pip-req-build-7b1b76ge + Running command git clone -q https://github.com/lhotse-speech/lhotse /tmp/pip-req-build-7b1b76ge + Collecting audioread>=2.1.9 + Using cached audioread-2.1.9-py3-none-any.whl + Collecting SoundFile>=0.10 + Using cached SoundFile-0.10.3.post1-py2.py3-none-any.whl (21 kB) + Collecting click>=7.1.1 + Using cached click-8.0.1-py3-none-any.whl (97 kB) + Collecting cytoolz>=0.10.1 + Using cached cytoolz-0.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.9 MB) + Collecting dataclasses + Using cached dataclasses-0.6-py3-none-any.whl (14 kB) + Collecting h5py>=2.10.0 + Downloading h5py-3.4.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (4.5 MB) + |________________________________| 4.5 MB 684 kB/s + Collecting intervaltree>=3.1.0 + Using cached intervaltree-3.1.0-py2.py3-none-any.whl + Collecting lilcom>=1.1.0 + Using cached lilcom-1.1.1-cp38-cp38-linux_x86_64.whl + Collecting numpy>=1.18.1 + Using cached numpy-1.21.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.8 MB) + Collecting packaging + Using cached packaging-21.0-py3-none-any.whl (40 kB) + Collecting pyyaml>=5.3.1 + Using cached PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl (662 kB) + Collecting tqdm + Downloading tqdm-4.62.1-py2.py3-none-any.whl (76 kB) + |________________________________| 76 kB 2.7 MB/s + Collecting torchaudio==0.9.0 + Downloading torchaudio-0.9.0-cp38-cp38-manylinux1_x86_64.whl (1.9 MB) + |________________________________| 1.9 MB 73.1 MB/s + Requirement already satisfied: torch==1.9.0 in ./test-icefall/lib/python3.8/site-packages (from torchaudio==0.9.0->lhotse===0.8.0.dev + -2a1410b-clean) (1.9.0) + Requirement already satisfied: typing-extensions in ./test-icefall/lib/python3.8/site-packages (from torch==1.9.0->torchaudio==0.9.0- + >lhotse===0.8.0.dev-2a1410b-clean) (3.10.0.0) + Collecting toolz>=0.8.0 + Using cached toolz-0.11.1-py3-none-any.whl (55 kB) + Collecting sortedcontainers<3.0,>=2.0 + Using cached sortedcontainers-2.4.0-py2.py3-none-any.whl (29 kB) + Collecting cffi>=1.0 + Using cached cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl (411 kB) + Collecting pycparser + Using cached pycparser-2.20-py2.py3-none-any.whl (112 kB) + Collecting pyparsing>=2.0.2 + Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB) + Building wheels for collected packages: lhotse + Building wheel for lhotse (setup.py) ... done + Created wheel for lhotse: filename=lhotse-0.8.0.dev_2a1410b_clean-py3-none-any.whl size=342242 sha256=f683444afa4dc0881133206b4646a + 9d0f774224cc84000f55d0a67f6e4a37997 + Stored in directory: /tmp/pip-ephem-wheel-cache-ftu0qysz/wheels/7f/7a/8e/a0bf241336e2e3cb573e1e21e5600952d49f5162454f2e612f + WARNING: Built wheel for lhotse is invalid: Metadata 1.2 mandates PEP 440 version, but '0.8.0.dev-2a1410b-clean' is not + Failed to build lhotse + Installing collected packages: pycparser, toolz, sortedcontainers, pyparsing, numpy, cffi, tqdm, torchaudio, SoundFile, pyyaml, packa + ging, lilcom, intervaltree, h5py, dataclasses, cytoolz, click, audioread, lhotse + Running setup.py install for lhotse ... done + DEPRECATION: lhotse was installed using the legacy 'setup.py install' method, because a wheel could not be built for it. A possible + replacement is to fix the wheel build issue reported above. 
You can find discussion regarding this at https://github.com/pypa/pip/is + sues/8368. + Successfully installed SoundFile-0.10.3.post1 audioread-2.1.9 cffi-1.14.6 click-8.0.1 cytoolz-0.11.0 dataclasses-0.6 h5py-3.4.0 inter + valtree-3.1.0 lhotse-0.8.0.dev-2a1410b-clean lilcom-1.1.1 numpy-1.21.2 packaging-21.0 pycparser-2.20 pyparsing-2.4.7 pyyaml-5.4.1 sor + tedcontainers-2.4.0 toolz-0.11.1 torchaudio-0.9.0 tqdm-4.62.1 + +(5) Download icefall +~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: + + $ cd /tmp + $ git clone https://github.com/k2-fsa/icefall + + Cloning into 'icefall'... + remote: Enumerating objects: 500, done. + remote: Counting objects: 100% (500/500), done. + remote: Compressing objects: 100% (308/308), done. + remote: Total 500 (delta 263), reused 307 (delta 102), pack-reused 0 + Receiving objects: 100% (500/500), 172.49 KiB | 385.00 KiB/s, done. + Resolving deltas: 100% (263/263), done. + + $ cd icefall + $ pip install -r requirements.txt + + Collecting kaldilm + Downloading kaldilm-1.8.tar.gz (48 kB) + |________________________________| 48 kB 574 kB/s + Collecting kaldialign + Using cached kaldialign-0.2-cp38-cp38-linux_x86_64.whl + Collecting sentencepiece>=0.1.96 + Using cached sentencepiece-0.1.96-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB) + Collecting tensorboard + Using cached tensorboard-2.6.0-py3-none-any.whl (5.6 MB) + Requirement already satisfied: setuptools>=41.0.0 in /ceph-fj/fangjun/test-icefall/lib/python3.8/site-packages (from tensorboard->-r + requirements.txt (line 4)) (57.4.0) + Collecting absl-py>=0.4 + Using cached absl_py-0.13.0-py3-none-any.whl (132 kB) + Collecting google-auth-oauthlib<0.5,>=0.4.1 + Using cached google_auth_oauthlib-0.4.5-py2.py3-none-any.whl (18 kB) + Collecting grpcio>=1.24.3 + Using cached grpcio-1.39.0-cp38-cp38-manylinux2014_x86_64.whl (4.3 MB) + Requirement already satisfied: wheel>=0.26 in /ceph-fj/fangjun/test-icefall/lib/python3.8/site-packages (from tensorboard->-r require + ments.txt (line 4)) (0.36.2) + Requirement already satisfied: numpy>=1.12.0 in /ceph-fj/fangjun/test-icefall/lib/python3.8/site-packages (from tensorboard->-r requi + rements.txt (line 4)) (1.21.2) + Collecting protobuf>=3.6.0 + Using cached protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0 MB) + Collecting werkzeug>=0.11.15 + Using cached Werkzeug-2.0.1-py3-none-any.whl (288 kB) + Collecting tensorboard-data-server<0.7.0,>=0.6.0 + Using cached tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl (4.9 MB) + Collecting google-auth<2,>=1.6.3 + Downloading google_auth-1.35.0-py2.py3-none-any.whl (152 kB) + |________________________________| 152 kB 1.4 MB/s + Collecting requests<3,>=2.21.0 + Using cached requests-2.26.0-py2.py3-none-any.whl (62 kB) + Collecting tensorboard-plugin-wit>=1.6.0 + Using cached tensorboard_plugin_wit-1.8.0-py3-none-any.whl (781 kB) + Collecting markdown>=2.6.8 + Using cached Markdown-3.3.4-py3-none-any.whl (97 kB) + Collecting six + Using cached six-1.16.0-py2.py3-none-any.whl (11 kB) + Collecting cachetools<5.0,>=2.0.0 + Using cached cachetools-4.2.2-py3-none-any.whl (11 kB) + Collecting rsa<5,>=3.1.4 + Using cached rsa-4.7.2-py3-none-any.whl (34 kB) + Collecting pyasn1-modules>=0.2.1 + Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB) + Collecting requests-oauthlib>=0.7.0 + Using cached requests_oauthlib-1.3.0-py2.py3-none-any.whl (23 kB) + Collecting pyasn1<0.5.0,>=0.4.6 + Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB) + Collecting urllib3<1.27,>=1.21.1 + 
Using cached urllib3-1.26.6-py2.py3-none-any.whl (138 kB) + Collecting certifi>=2017.4.17 + Using cached certifi-2021.5.30-py2.py3-none-any.whl (145 kB) + Collecting charset-normalizer~=2.0.0 + Using cached charset_normalizer-2.0.4-py3-none-any.whl (36 kB) + Collecting idna<4,>=2.5 + Using cached idna-3.2-py3-none-any.whl (59 kB) + Collecting oauthlib>=3.0.0 + Using cached oauthlib-3.1.1-py2.py3-none-any.whl (146 kB) + Building wheels for collected packages: kaldilm + Building wheel for kaldilm (setup.py) ... done + Created wheel for kaldilm: filename=kaldilm-1.8-cp38-cp38-linux_x86_64.whl size=897233 sha256=eccb906cafcd45bf9a7e1a1718e4534254bfb + f4c0d0cbc66eee6c88d68a63862 + Stored in directory: /root/fangjun/.cache/pip/wheels/85/7d/63/f2dd586369b8797cb36d213bf3a84a789eeb92db93d2e723c9 + Successfully built kaldilm + Installing collected packages: urllib3, pyasn1, idna, charset-normalizer, certifi, six, rsa, requests, pyasn1-modules, oauthlib, cach + etools, requests-oauthlib, google-auth, werkzeug, tensorboard-plugin-wit, tensorboard-data-server, protobuf, markdown, grpcio, google + -auth-oauthlib, absl-py, tensorboard, sentencepiece, kaldilm, kaldialign + Successfully installed absl-py-0.13.0 cachetools-4.2.2 certifi-2021.5.30 charset-normalizer-2.0.4 google-auth-1.35.0 google-auth-oaut + hlib-0.4.5 grpcio-1.39.0 idna-3.2 kaldialign-0.2 kaldilm-1.8 markdown-3.3.4 oauthlib-3.1.1 protobuf-3.17.3 pyasn1-0.4.8 pyasn1-module + s-0.2.8 requests-2.26.0 requests-oauthlib-1.3.0 rsa-4.7.2 sentencepiece-0.1.96 six-1.16.0 tensorboard-2.6.0 tensorboard-data-server-0 + .6.1 tensorboard-plugin-wit-1.8.0 urllib3-1.26.6 werkzeug-2.0.1 + + +Test Your Installation +---------------------- + +To test that your installation is successful, let us run +the `yesno recipe `_ +on CPU. + +Data preparation +~~~~~~~~~~~~~~~~ + +.. code-block:: bash + + $ export PYTHONPATH=/tmp/icefall:$PYTHONPATH + $ cd /tmp/icefall + $ cd egs/yesno/ASR + $ ./prepare.sh + +The log of running ``./prepare.sh`` is: + +.. code-block:: + + 2021-08-23 19:27:26 (prepare.sh:24:main) dl_dir: /tmp/icefall/egs/yesno/ASR/download + 2021-08-23 19:27:26 (prepare.sh:27:main) stage 0: Download data + Downloading waves_yesno.tar.gz: 4.49MB [00:03, 1.39MB/s] + 2021-08-23 19:27:30 (prepare.sh:36:main) Stage 1: Prepare yesno manifest + 2021-08-23 19:27:31 (prepare.sh:42:main) Stage 2: Compute fbank for yesno + 2021-08-23 19:27:32,803 INFO [compute_fbank_yesno.py:52] Processing train + Extracting and storing features: 100%|_______________________________________________________________| 90/90 [00:01<00:00, 80.57it/s] + 2021-08-23 19:27:34,085 INFO [compute_fbank_yesno.py:52] Processing test + Extracting and storing features: 100%|______________________________________________________________| 30/30 [00:00<00:00, 248.21it/s] + 2021-08-23 19:27:34 (prepare.sh:48:main) Stage 3: Prepare lang + 2021-08-23 19:27:35 (prepare.sh:63:main) Stage 4: Prepare G + /tmp/pip-install-fcordre9/kaldilm_6899d26f2d684ad48f21025950cd2866/kaldilm/csrc/arpa_file_parser.cc:void kaldilm::ArpaFileParser::Rea + d(std::istream&):79 + [I] Reading \data\ section. + /tmp/pip-install-fcordre9/kaldilm_6899d26f2d684ad48f21025950cd2866/kaldilm/csrc/arpa_file_parser.cc:void kaldilm::ArpaFileParser::Rea + d(std::istream&):140 + [I] Reading \1-grams: section. 
+ 2021-08-23 19:27:35 (prepare.sh:89:main) Stage 5: Compile HLG + 2021-08-23 19:27:35,928 INFO [compile_hlg.py:120] Processing data/lang_phone + 2021-08-23 19:27:35,929 INFO [lexicon.py:116] Converting L.pt to Linv.pt + 2021-08-23 19:27:35,931 INFO [compile_hlg.py:48] Building ctc_topo. max_token_id: 3 + 2021-08-23 19:27:35,932 INFO [compile_hlg.py:52] Loading G.fst.txt + 2021-08-23 19:27:35,932 INFO [compile_hlg.py:62] Intersecting L and G + 2021-08-23 19:27:35,933 INFO [compile_hlg.py:64] LG shape: (4, None) + 2021-08-23 19:27:35,933 INFO [compile_hlg.py:66] Connecting LG + 2021-08-23 19:27:35,933 INFO [compile_hlg.py:68] LG shape after k2.connect: (4, None) + 2021-08-23 19:27:35,933 INFO [compile_hlg.py:70] + 2021-08-23 19:27:35,933 INFO [compile_hlg.py:71] Determinizing LG + 2021-08-23 19:27:35,934 INFO [compile_hlg.py:74] + 2021-08-23 19:27:35,934 INFO [compile_hlg.py:76] Connecting LG after k2.determinize + 2021-08-23 19:27:35,934 INFO [compile_hlg.py:79] Removing disambiguation symbols on LG + 2021-08-23 19:27:35,934 INFO [compile_hlg.py:87] LG shape after k2.remove_epsilon: (6, None) + 2021-08-23 19:27:35,935 INFO [compile_hlg.py:92] Arc sorting LG + 2021-08-23 19:27:35,935 INFO [compile_hlg.py:95] Composing H and LG + 2021-08-23 19:27:35,935 INFO [compile_hlg.py:102] Connecting LG + 2021-08-23 19:27:35,935 INFO [compile_hlg.py:105] Arc sorting LG + 2021-08-23 19:27:35,936 INFO [compile_hlg.py:107] HLG.shape: (8, None) + 2021-08-23 19:27:35,936 INFO [compile_hlg.py:123] Saving HLG.pt to data/lang_phone + + +Training +~~~~~~~~ + +Now let us run the training part: + +.. code-block:: + + $ export CUDA_VISIBLE_DEVICES="" + $ ./tdnn/train.py + +.. CAUTION:: + + We use ``export CUDA_VISIBLE_DEVICES=""`` so that ``icefall`` uses CPU + even if there are GPUs available. + +The training log is given below: + +.. code-block:: + + 2021-08-23 19:30:31,072 INFO [train.py:465] Training started + 2021-08-23 19:30:31,072 INFO [train.py:466] {'exp_dir': PosixPath('tdnn/exp'), 'lang_dir': PosixPath('data/lang_phone'), 'lr': 0.01, + 'feature_dim': 23, 'weight_decay': 1e-06, 'start_epoch': 0, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, ' + best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 10, 'valid_interval': 10, 'beam_size': 10, 'reduction': 'sum', 'use_doub + le_scores': True, 'world_size': 1, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 15, 'feature_dir': PosixPath('data/fbank' + ), 'max_duration': 30.0, 'bucketing_sampler': False, 'num_buckets': 10, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0 + , 'on_the_fly_feats': False, 'shuffle': True, 'return_cuts': True, 'num_workers': 2} + 2021-08-23 19:30:31,074 INFO [lexicon.py:113] Loading pre-compiled data/lang_phone/Linv.pt + 2021-08-23 19:30:31,098 INFO [asr_datamodule.py:146] About to get train cuts + 2021-08-23 19:30:31,098 INFO [asr_datamodule.py:240] About to get train cuts + 2021-08-23 19:30:31,102 INFO [asr_datamodule.py:149] About to create train dataset + 2021-08-23 19:30:31,102 INFO [asr_datamodule.py:200] Using SingleCutSampler. 
+ 2021-08-23 19:30:31,102 INFO [asr_datamodule.py:206] About to create train dataloader + 2021-08-23 19:30:31,102 INFO [asr_datamodule.py:219] About to get test cuts + 2021-08-23 19:30:31,102 INFO [asr_datamodule.py:246] About to get test cuts + 2021-08-23 19:30:31,357 INFO [train.py:416] Epoch 0, batch 0, batch avg loss 1.0789, total avg loss: 1.0789, batch size: 4 + 2021-08-23 19:30:31,848 INFO [train.py:416] Epoch 0, batch 10, batch avg loss 0.5356, total avg loss: 0.7556, batch size: 4 + 2021-08-23 19:30:32,301 INFO [train.py:432] Epoch 0, valid loss 0.9972, best valid loss: 0.9972 best valid epoch: 0 + 2021-08-23 19:30:32,805 INFO [train.py:416] Epoch 0, batch 20, batch avg loss 0.2436, total avg loss: 0.5717, batch size: 3 + 2021-08-23 19:30:33,109 INFO [train.py:432] Epoch 0, valid loss 0.4167, best valid loss: 0.4167 best valid epoch: 0 + 2021-08-23 19:30:33,121 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-0.pt + 2021-08-23 19:30:33,325 INFO [train.py:416] Epoch 1, batch 0, batch avg loss 0.2214, total avg loss: 0.2214, batch size: 5 + 2021-08-23 19:30:33,798 INFO [train.py:416] Epoch 1, batch 10, batch avg loss 0.0781, total avg loss: 0.1343, batch size: 5 + 2021-08-23 19:30:34,065 INFO [train.py:432] Epoch 1, valid loss 0.0859, best valid loss: 0.0859 best valid epoch: 1 + 2021-08-23 19:30:34,556 INFO [train.py:416] Epoch 1, batch 20, batch avg loss 0.0421, total avg loss: 0.0975, batch size: 3 + 2021-08-23 19:30:34,810 INFO [train.py:432] Epoch 1, valid loss 0.0431, best valid loss: 0.0431 best valid epoch: 1 + 2021-08-23 19:30:34,824 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-1.pt + + ... ... + + 2021-08-23 19:30:49,657 INFO [train.py:416] Epoch 13, batch 0, batch avg loss 0.0109, total avg loss: 0.0109, batch size: 5 + 2021-08-23 19:30:49,984 INFO [train.py:416] Epoch 13, batch 10, batch avg loss 0.0093, total avg loss: 0.0096, batch size: 4 + 2021-08-23 19:30:50,239 INFO [train.py:432] Epoch 13, valid loss 0.0104, best valid loss: 0.0101 best valid epoch: 12 + 2021-08-23 19:30:50,569 INFO [train.py:416] Epoch 13, batch 20, batch avg loss 0.0092, total avg loss: 0.0096, batch size: 2 + 2021-08-23 19:30:50,819 INFO [train.py:432] Epoch 13, valid loss 0.0101, best valid loss: 0.0101 best valid epoch: 13 + 2021-08-23 19:30:50,835 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-13.pt + 2021-08-23 19:30:51,024 INFO [train.py:416] Epoch 14, batch 0, batch avg loss 0.0105, total avg loss: 0.0105, batch size: 5 + 2021-08-23 19:30:51,317 INFO [train.py:416] Epoch 14, batch 10, batch avg loss 0.0099, total avg loss: 0.0097, batch size: 4 + 2021-08-23 19:30:51,552 INFO [train.py:432] Epoch 14, valid loss 0.0108, best valid loss: 0.0101 best valid epoch: 13 + 2021-08-23 19:30:51,869 INFO [train.py:416] Epoch 14, batch 20, batch avg loss 0.0096, total avg loss: 0.0097, batch size: 5 + 2021-08-23 19:30:52,107 INFO [train.py:432] Epoch 14, valid loss 0.0102, best valid loss: 0.0101 best valid epoch: 13 + 2021-08-23 19:30:52,126 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-14.pt + 2021-08-23 19:30:52,128 INFO [train.py:537] Done! + +Decoding +~~~~~~~~ + +Let us use the trained model to decode the test set: + +.. code-block:: + + $ ./tdnn/decode.py + +The decoding log is: + +.. 
code-block::
+
+  2021-08-23 19:35:30,192 INFO [decode.py:249] Decoding started
+  2021-08-23 19:35:30,192 INFO [decode.py:250] {'exp_dir': PosixPath('tdnn/exp'), 'lang_dir': PosixPath('data/lang_phone'), 'lm_dir': PosixPath('data/lm'), 'feature_dim': 23, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'epoch': 14, 'avg': 2, 'feature_dir': PosixPath('data/fbank'), 'max_duration': 30.0, 'bucketing_sampler': False, 'num_buckets': 10, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'return_cuts': True, 'num_workers': 2}
+  2021-08-23 19:35:30,193 INFO [lexicon.py:113] Loading pre-compiled data/lang_phone/Linv.pt
+  2021-08-23 19:35:30,213 INFO [decode.py:259] device: cpu
+  2021-08-23 19:35:30,217 INFO [decode.py:279] averaging ['tdnn/exp/epoch-13.pt', 'tdnn/exp/epoch-14.pt']
+  /tmp/icefall/icefall/checkpoint.py:146: UserWarning: floor_divide is deprecated, and will be removed in a future version of pytorch.
+  It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values.
+  To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). (Triggered internally at /pytorch/aten/src/ATen/native/BinaryOps.cpp:450.)
+    avg[k] //= n
+  2021-08-23 19:35:30,220 INFO [asr_datamodule.py:219] About to get test cuts
+  2021-08-23 19:35:30,220 INFO [asr_datamodule.py:246] About to get test cuts
+  2021-08-23 19:35:30,409 INFO [decode.py:190] batch 0/8, cuts processed until now is 4
+  2021-08-23 19:35:30,571 INFO [decode.py:228] The transcripts are stored in tdnn/exp/recogs-test_set.txt
+  2021-08-23 19:35:30,572 INFO [utils.py:317] [test_set] %WER 0.42% [1 / 240, 0 ins, 1 del, 0 sub ]
+  2021-08-23 19:35:30,573 INFO [decode.py:236] Wrote detailed error stats to tdnn/exp/errs-test_set.txt
+  2021-08-23 19:35:30,573 INFO [decode.py:299] Done!
+
+**Congratulations!** You have successfully set up the environment and run the first recipe in ``icefall``.
+
+Have fun with ``icefall``!
+
+YouTube Video
+-------------
+
+We provide the following YouTube video showing how to install ``icefall``.
+It also shows how to debug various problems that you may encounter while
+using ``icefall``.
+
+.. note::
+
+  To get the latest news about `next-gen Kaldi `_, please subscribe to
+  the following YouTube channel by `Nadira Povey `_:
+
+  ``_
+
+.. youtube:: LVmrBD0tLfE
diff --git a/_sources/recipes/aishell/conformer_ctc.rst.txt b/_sources/recipes/aishell/conformer_ctc.rst.txt
new file mode 100644
index 000000000..75a2a8eca
--- /dev/null
+++ b/_sources/recipes/aishell/conformer_ctc.rst.txt
@@ -0,0 +1,747 @@
+Conformer CTC
+=============
+
+This tutorial shows you how to run a Conformer CTC model
+with the `Aishell `_ dataset.
+
+
+.. HINT::
+
+  We assume you have read the page :ref:`install icefall` and have set up
+  the environment for ``icefall``.
+
+.. HINT::
+
+  We recommend using one or more GPUs to run this recipe.
+
+In this tutorial, you will learn:
+
+  - (1) How to prepare data for training and decoding
+  - (2) How to start the training, either with a single GPU or multiple GPUs
+  - (3) How to do decoding after training, with ctc-decoding, 1best and attention decoder rescoring
+  - (4) How to use a pre-trained model, provided by us
+
+Data preparation
+----------------
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./prepare.sh
+
+The script ``./prepare.sh`` handles the data preparation for you, **automagically**.
+All you need to do is to run it.
+
+The data preparation contains several stages; you can use the following two
+options:
+
+  - ``--stage``
+  - ``--stop-stage``
+
+to control which stage(s) should be run. By default, all stages are executed.
+
+
+For example,
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./prepare.sh --stage 0 --stop-stage 0
+
+means to run only stage 0.
+
+To run stage 2 to stage 5, use:
+
+.. code-block:: bash
+
+  $ ./prepare.sh --stage 2 --stop-stage 5
+
+.. HINT::
+
+  If you have pre-downloaded the `Aishell `_
+  dataset and the `musan `_ dataset, say,
+  they are saved in ``/tmp/aishell`` and ``/tmp/musan``, you can modify
+  the ``dl_dir`` variable in ``./prepare.sh`` to point to ``/tmp`` so that
+  ``./prepare.sh`` won't re-download them.
+
+.. HINT::
+
+  A 3-gram language model will be downloaded from huggingface. We assume you have
+  installed and initialized ``git-lfs``. If not, you can install ``git-lfs`` by
+
+  .. code-block:: bash
+
+    $ sudo apt-get install git-lfs
+    $ git-lfs install
+
+  If you don't have ``sudo`` permission, you can download the
+  `git-lfs binary `_ here, then add it to your ``PATH``.
+
+.. NOTE::
+
+  All files generated by ``./prepare.sh``, e.g., features, lexicon, etc.,
+  are saved in the ``./data`` directory.
+
+
+Training
+--------
+
+Configurable options
+~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/train.py --help
+
+shows you the training options that can be passed from the commandline.
+The following options are used quite often (see also the toy sketch after
+this list):
+
+  - ``--exp-dir``
+
+    The experiment folder to save logs and model checkpoints,
+    defaults to ``./conformer_ctc/exp``.
+
+  - ``--num-epochs``
+
+    It is the number of epochs to train. For instance,
+    ``./conformer_ctc/train.py --num-epochs 30`` trains for 30 epochs
+    and generates ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-29.pt``
+    in the folder set by ``--exp-dir``.
+
+  - ``--start-epoch``
+
+    It's used to resume training.
+    ``./conformer_ctc/train.py --start-epoch 10`` loads the
+    checkpoint ``./conformer_ctc/exp/epoch-9.pt`` and starts
+    training from epoch 10, based on the state from epoch 9.
+
+  - ``--world-size``
+
+    It is used for multi-GPU single-machine DDP training.
+
+      - (a) If it is 1, then no DDP training is used.
+
+      - (b) If it is 2, then GPU 0 and GPU 1 are used for DDP training.
+
+    The following shows some use cases with it.
+
+      **Use case 1**: You have 4 GPUs, but you only want to use GPU 0 and
+      GPU 2 for training. You can do the following:
+
+        .. code-block:: bash
+
+          $ cd egs/aishell/ASR
+          $ export CUDA_VISIBLE_DEVICES="0,2"
+          $ ./conformer_ctc/train.py --world-size 2
+
+      **Use case 2**: You have 4 GPUs and you want to use all of them
+      for training. You can do the following:
+
+        .. code-block:: bash
+
+          $ cd egs/aishell/ASR
+          $ ./conformer_ctc/train.py --world-size 4
+
+      **Use case 3**: You have 4 GPUs but you only want to use GPU 3
+      for training. You can do the following:
+
+        .. code-block:: bash
+
+          $ cd egs/aishell/ASR
+          $ export CUDA_VISIBLE_DEVICES="3"
+          $ ./conformer_ctc/train.py --world-size 1
+
+    .. CAUTION::
+
+      Only multi-GPU single-machine DDP training is implemented at present.
+      Multi-GPU multi-machine DDP training will be added later.
+
+  - ``--max-duration``
+
+    It specifies the total number of seconds over all utterances in a
+    batch, before **padding**.
+    If you encounter CUDA OOM, please reduce it. For instance, if
+    you are using a V100 NVIDIA GPU, we recommend setting it to ``200``.
+
+    .. HINT::
+
+      Due to padding, the number of seconds of all utterances in a
+      batch will usually be larger than ``--max-duration``.
+
+      A larger value for ``--max-duration`` may cause OOM during training,
+      while a smaller value may increase the training time. You have to
+      tune it.
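+
+To build intuition for ``--max-duration``, here is a **toy illustration**
+(not icefall code) of the idea: utterances are accumulated into a batch
+until their total un-padded duration in seconds would exceed the limit.
+
+.. code-block:: python
+
+  # Toy sketch of duration-based batching; the lhotse samplers used by
+  # icefall are more sophisticated (bucketing, shuffling, etc.).
+  def batch_by_duration(durations, max_duration=200.0):
+      batches, cur, cur_dur = [], [], 0.0
+      for d in durations:
+          if cur and cur_dur + d > max_duration:
+              batches.append(cur)
+              cur, cur_dur = [], 0.0
+          cur.append(d)
+          cur_dur += d
+      if cur:
+          batches.append(cur)
+      return batches
+
+  print(batch_by_duration([120.0, 50.0, 40.0, 90.0]))
+  # [[120.0, 50.0], [40.0, 90.0]]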
+
+
+Pre-configured options
+~~~~~~~~~~~~~~~~~~~~~~
+
+There are some training options, e.g., weight decay,
+number of warmup steps, etc.,
+that are not passed from the commandline.
+They are pre-configured by the function ``get_params()`` in
+`conformer_ctc/train.py `_
+
+You don't need to change these pre-configured parameters. If you really need to change
+them, please modify ``./conformer_ctc/train.py`` directly.
+
+
+.. CAUTION::
+
+  The training set is speed-perturbed with two factors: 0.9 and 1.1.
+  Each epoch therefore actually processes ``3x150 == 450`` hours of data.
+
+
+Training logs
+~~~~~~~~~~~~~
+
+Training logs and checkpoints are saved in the folder set by ``--exp-dir``
+(default ``conformer_ctc/exp``). You will find the following files in that directory:
+
+  - ``epoch-0.pt``, ``epoch-1.pt``, ...
+
+    These are checkpoint files, containing the model ``state_dict`` and optimizer ``state_dict``.
+    To resume training from some checkpoint, say ``epoch-10.pt``, you can use:
+
+      .. code-block:: bash
+
+        $ ./conformer_ctc/train.py --start-epoch 11
+
+  - ``tensorboard/``
+
+    This folder contains TensorBoard logs. Training loss, validation loss, learning
+    rate, etc., are recorded in these logs. You can visualize them by:
+
+      .. code-block:: bash
+
+        $ cd conformer_ctc/exp/tensorboard
+        $ tensorboard dev upload --logdir . --name "Aishell conformer ctc training with icefall" --description "Training with new LabelSmoothing loss, see https://github.com/k2-fsa/icefall/pull/109"
+
+    It will print something like below:
+
+      .. code-block::
+
+        TensorFlow installation not found - running with reduced feature set.
+        Upload started and will continue reading any new data as it's added to the logdir.
+
+        To stop uploading, press Ctrl-C.
+
+        New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/engw8KSkTZqS24zBV5dgCg/
+
+        [2021-11-22T11:09:27] Started scanning logdir.
+        [2021-11-22T11:10:14] Total uploaded: 116068 scalars, 0 tensors, 0 binary objects
+        Listening for new data in logdir...
+
+    Note there is a URL in the above output. Click it and you will see
+    the following screenshot:
+
+      .. figure:: images/aishell-conformer-ctc-tensorboard-log.jpg
+         :width: 600
+         :alt: TensorBoard screenshot
+         :align: center
+         :target: https://tensorboard.dev/experiment/WE1DocDqRRCOSAgmGyClhg/
+
+         TensorBoard screenshot.
+
+  - ``log/log-train-xxxx``
+
+    It is the detailed training log in text format, the same as the one
+    you saw printed to the console during training.
+
+Usage examples
+~~~~~~~~~~~~~~
+
+The following shows typical use cases:
+
+**Case 1**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/train.py --max-duration 200
+
+It uses ``--max-duration`` of 200 to avoid OOM.
+
+
+**Case 2**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ export CUDA_VISIBLE_DEVICES="0,3"
+  $ ./conformer_ctc/train.py --world-size 2
+
+It uses GPU 0 and GPU 3 for DDP training.
+
+**Case 3**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/train.py --num-epochs 10 --start-epoch 3
+
+It loads checkpoint ``./conformer_ctc/exp/epoch-2.pt`` and starts
+training from epoch 3. Also, it trains for 10 epochs.
+
+Decoding
+--------
+
+The decoding part uses checkpoints saved by the training part, so you have
+to run the training part first.
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/decode.py --help
+
+shows the options for decoding.
+
+The commonly used options are:
+
+  - ``--method``
+
+    This specifies the decoding method.
+
+    The following command uses the attention decoder for rescoring:
+
+    .. code-block::
+
+      $ cd egs/aishell/ASR
+      $ ./conformer_ctc/decode.py --method attention-decoder --max-duration 30 --nbest-scale 0.5
+
+  - ``--nbest-scale``
+
+    It is used to scale down lattice scores so that there are more unique
+    paths for rescoring.
+
+  - ``--max-duration``
+
+    It has the same meaning as the one during training. A larger
+    value may cause OOM.
+
+Pre-trained Model
+-----------------
+
+We have uploaded a pre-trained model to
+``_.
+
+In the following, we describe how to use the pre-trained model to transcribe
+a single sound file or multiple sound files.
+
+Install kaldifeat
+~~~~~~~~~~~~~~~~~
+
+`kaldifeat `_ is used to
+extract features for a single sound file or multiple sound files
+at the same time.
+
+Please refer to ``_ for installation.
+
+Download the pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following commands describe how to download the pre-trained model:
+
+.. code-block::
+
+  $ cd egs/aishell/ASR
+  $ mkdir tmp
+  $ cd tmp
+  $ git lfs install
+  $ git clone https://huggingface.co/pkufool/icefall_asr_aishell_conformer_ctc
+
+.. CAUTION::
+
+  You have to use ``git lfs`` to download the pre-trained model.
+
+.. CAUTION::
+
+  In order to use this pre-trained model, your k2 version has to be v1.7 or later.
+
+After downloading, you will have the following files:
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ tree tmp
+
+.. code-block:: bash
+
+  tmp/
+  `-- icefall_asr_aishell_conformer_ctc
+      |-- README.md
+      |-- data
+      |   `-- lang_char
+      |       |-- HLG.pt
+      |       |-- tokens.txt
+      |       `-- words.txt
+      |-- exp
+      |   `-- pretrained.pt
+      `-- test_waves
+          |-- BAC009S0764W0121.wav
+          |-- BAC009S0764W0122.wav
+          |-- BAC009S0764W0123.wav
+          `-- trans.txt
+
+  5 directories, 9 files
+
+**File descriptions**:
+
+  - ``data/lang_char/HLG.pt``
+
+    It is the decoding graph.
+
+  - ``data/lang_char/tokens.txt``
+
+    It contains tokens and their IDs.
+    Provided only for convenience so that you can look up the SOS/EOS ID easily.
+
+  - ``data/lang_char/words.txt``
+
+    It contains words and their IDs.
+
+  - ``exp/pretrained.pt``
+
+    It contains pre-trained model parameters, obtained by averaging
+    checkpoints from ``epoch-25.pt`` to ``epoch-84.pt`` (see the sketch
+    after this list).
+    Note: We have removed the optimizer ``state_dict`` to reduce file size.
+
+  - ``test_waves/*.wav``
+
+    It contains some test sound files from the Aishell ``test`` dataset.
+
+  - ``test_waves/trans.txt``
+
+    It contains the reference transcripts for the sound files in ``test_waves/``.
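+
+For the curious, the sketch below shows, in **simplified** form, how such a
+``pretrained.pt`` can be produced by averaging the ``model`` ``state_dict`` of
+several epoch checkpoints. It is not icefall's actual implementation (see
+``icefall/checkpoint.py`` for that), and it glosses over details such as
+integer-typed tensors:
+
+.. code-block:: python
+
+  # Simplified sketch of checkpoint averaging; not icefall's real code.
+  import torch
+
+  def average_checkpoints(filenames):
+      avg = torch.load(filenames[0], map_location="cpu")["model"]
+      for f in filenames[1:]:
+          state = torch.load(f, map_location="cpu")["model"]
+          for k in avg:
+              avg[k] += state[k]
+      for k in avg:
+          avg[k] = avg[k] / len(filenames)
+      return avg
+
+  ckpts = [f"conformer_ctc/exp/epoch-{i}.pt" for i in range(25, 85)]
+  torch.save({"model": average_checkpoints(ckpts)}, "pretrained.pt")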
+
+The information of the test sound files is listed below:
+
+.. code-block:: bash
+
+  $ soxi tmp/icefall_asr_aishell_conformer_ctc/test_waves/*.wav
+
+  Input File     : 'tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav'
+  Channels       : 1
+  Sample Rate    : 16000
+  Precision      : 16-bit
+  Duration       : 00:00:04.20 = 67263 samples ~ 315.295 CDDA sectors
+  File Size      : 135k
+  Bit Rate       : 256k
+  Sample Encoding: 16-bit Signed Integer PCM
+
+
+  Input File     : 'tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav'
+  Channels       : 1
+  Sample Rate    : 16000
+  Precision      : 16-bit
+  Duration       : 00:00:04.12 = 65840 samples ~ 308.625 CDDA sectors
+  File Size      : 132k
+  Bit Rate       : 256k
+  Sample Encoding: 16-bit Signed Integer PCM
+
+
+  Input File     : 'tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav'
+  Channels       : 1
+  Sample Rate    : 16000
+  Precision      : 16-bit
+  Duration       : 00:00:04.00 = 64000 samples ~ 300 CDDA sectors
+  File Size      : 128k
+  Bit Rate       : 256k
+  Sample Encoding: 16-bit Signed Integer PCM
+
+  Total Duration of 3 files: 00:00:12.32
+
+Usage
+~~~~~
+
+.. code-block::
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/pretrained.py --help
+
+displays the help information.
+
+It supports three decoding methods:
+
+  - CTC decoding
+  - HLG decoding
+  - HLG + attention decoder rescoring
+
+CTC decoding
+^^^^^^^^^^^^
+
+CTC decoding uses only the CTC topology during decoding; it does not use
+a lexicon or a language model.
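+
+As a reminder of what CTC decoding does conceptually, here is a **toy**
+greedy-search variant (pick the best token per frame, collapse repeats,
+drop blanks). The actual ``ctc-decoding`` method in icefall builds a CTC
+topology with k2 and searches a lattice; this sketch is only for intuition:
+
+.. code-block:: python
+
+  # Toy CTC greedy decoding; not what icefall's ctc-decoding actually runs.
+  import torch
+
+  def ctc_greedy_decode(log_probs: torch.Tensor, blank: int = 0) -> list:
+      """log_probs: (num_frames, num_tokens) tensor of log-probabilities."""
+      ids = log_probs.argmax(dim=-1).tolist()
+      out, prev = [], None
+      for i in ids:
+          if i != prev and i != blank:
+              out.append(i)  # keep a token only when it changes and is not blank
+          prev = i
+      return out
+
+  x = torch.randn(20, 10).log_softmax(dim=-1)  # fake posteriors
+  print(ctc_greedy_decode(x))                  # recognized token IDs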
+
+The command to run CTC decoding is:
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/pretrained.py \
+      --checkpoint ./tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt \
+      --tokens-file ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/tokens.txt \
+      --method ctc-decoding \
+      ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav \
+      ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav \
+      ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav
+
+The output is given below:
+
+.. code-block::
+
+  2021-11-18 07:53:41,707 INFO [pretrained.py:229] {'sample_rate': 16000, 'subsampling_factor': 4, 'feature_dim': 80, 'nhead': 4, 'attention_dim': 512, 'num_decoder_layers': 6, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f2fd997f752ed11bbef4c306652c433e83f9cf12', 'k2-git-date': 'Sun Sep 19 09:41:46 2021', 'lhotse-version': '0.11.0.dev+git.33cfe45.clean', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'aishell', 'icefall-git-sha1': 'd57a873-dirty', 'icefall-git-date': 'Wed Nov 17 19:53:25 2021', 'icefall-path': '/ceph-hw/kangwei/code/icefall_aishell3', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/code/lhotse/lhotse/__init__.py'}, 'checkpoint': './tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt', 'tokens_file': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/tokens.txt', 'words_file': None, 'HLG': None, 'method': 'ctc-decoding', 'num_paths': 100, 'ngram_lm_scale': 0.3, 'attention_decoder_scale': 0.9, 'nbest_scale': 0.5, 'sos_id': 1, 'eos_id': 1, 'num_classes': 4336, 'sound_files': ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']}
+  2021-11-18 07:53:41,708 INFO [pretrained.py:240] device: cuda:0
+  2021-11-18 07:53:41,708 INFO [pretrained.py:242] Creating model
+  2021-11-18 07:53:51,131 INFO [pretrained.py:259] Constructing Fbank computer
+  2021-11-18 07:53:51,134 INFO [pretrained.py:269] Reading sound files: ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']
+  2021-11-18 07:53:51,138 INFO [pretrained.py:275] Decoding started
+  2021-11-18 07:53:51,241 INFO [pretrained.py:293] Use CTC decoding
+  2021-11-18 07:53:51,704 INFO [pretrained.py:369]
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav:
+  甚 至 出 现 交 易 几 乎 停 止 的 情 况
+
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav:
+  一 二 线 城 市 虽 然 也 处 于 调 整 中
+
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav:
+  但 因 为 聚 集 了 过 多 公 共 资 源
+
+
+  2021-11-18 07:53:51,704 INFO [pretrained.py:371] Decoding Done
+
+
+HLG decoding
+^^^^^^^^^^^^
+
+HLG decoding uses the best path of the decoding lattice as the decoding result.
+
+The command to run HLG decoding is:
+
+.. code-block:: bash
+
+  $ cd egs/aishell/ASR
+  $ ./conformer_ctc/pretrained.py \
+      --checkpoint ./tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt \
+      --words-file ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt \
+      --HLG ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt \
+      --method 1best \
+      ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav \
+      ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav \
+      ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav
+
+The output is given below:
+
+.. code-block::
+
+  2021-11-18 07:37:38,683 INFO [pretrained.py:229] {'sample_rate': 16000, 'subsampling_factor': 4, 'feature_dim': 80, 'nhead': 4, 'attention_dim': 512, 'num_decoder_layers': 6, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f2fd997f752ed11bbef4c306652c433e83f9cf12', 'k2-git-date': 'Sun Sep 19 09:41:46 2021', 'lhotse-version': '0.11.0.dev+git.33cfe45.clean', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'aishell', 'icefall-git-sha1': 'd57a873-clean', 'icefall-git-date': 'Wed Nov 17 19:53:25 2021', 'icefall-path': '/ceph-hw/kangwei/code/icefall_aishell3', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/code/lhotse/lhotse/__init__.py'}, 'checkpoint': './tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt', 'tokens_file': None, 'words_file': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt', 'HLG': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt', 'method': '1best', 'num_paths': 100, 'ngram_lm_scale': 0.3, 'attention_decoder_scale': 0.9, 'nbest_scale': 0.5, 'sos_id': 1, 'eos_id': 1, 'num_classes': 4336, 'sound_files': ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']}
+  2021-11-18 07:37:38,684 INFO [pretrained.py:240] device: cuda:0
+  2021-11-18 07:37:38,684 INFO [pretrained.py:242] Creating model
+  2021-11-18 07:37:47,651 INFO [pretrained.py:259] Constructing Fbank computer
+  2021-11-18 07:37:47,654 INFO [pretrained.py:269] Reading sound files: ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']
+  2021-11-18 07:37:47,659 INFO [pretrained.py:275] Decoding started
+  2021-11-18 07:37:47,752 INFO [pretrained.py:321] Loading HLG from ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt
+  2021-11-18 07:37:51,887 INFO [pretrained.py:340] Use HLG decoding
+  2021-11-18 07:37:52,102 INFO [pretrained.py:370]
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav:
+  甚至 出现 交易 几乎 停止 的 情况
+
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav:
+  一二 线 城市 虽然 也 处于 调整 中
+
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav:
+  但 因为 聚集 了 过多 公共 资源
+
+
+  2021-11-18 07:37:52,102 INFO [pretrained.py:372] Decoding Done
+
+
+HLG decoding + attention decoder rescoring
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It extracts n paths from the lattice and rescores the extracted paths with
+an attention decoder. The path with the highest score is the decoding result.
+
+The command to run HLG decoding + attention decoder rescoring is:
+
code-block:: bash + + $ cd egs/aishell/ASR + $ ./conformer_ctc/pretrained.py \ + --checkpoint ./tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt \ + --words-file ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt \ + --HLG ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt \ + --method attention-decoder \ + ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0121.wav \ + ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0122.wav \ + ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0123.wav + +The output is below: + +.. code-block:: + + 2021-11-18 07:42:05,965 INFO [pretrained.py:229] {'sample_rate': 16000, 'subsampling_factor': 4, 'feature_dim': 80, 'nhead': 4, 'attention_dim': 512, 'num_decoder_layers': 6, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f2fd997f752ed11bbef4c306652c433e83f9cf12', 'k2-git-date': 'Sun Sep 19 09:41:46 2021', 'lhotse-version': '0.11.0.dev+git.33cfe45.clean', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'aishell', 'icefall-git-sha1': 'd57a873-dirty', 'icefall-git-date': 'Wed Nov 17 19:53:25 2021', 'icefall-path': '/ceph-hw/kangwei/code/icefall_aishell3', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/code/lhotse/lhotse/__init__.py'}, 'checkpoint': './tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt', 'tokens_file': None, 'words_file': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt', 'HLG': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt', 'method': 'attention-decoder', 'num_paths': 100, 'ngram_lm_scale': 0.3, 'attention_decoder_scale': 0.9, 'nbest_scale': 0.5, 'sos_id': 1, 'eos_id': 1, 'num_classes': 4336, 'sound_files': ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']} + 2021-11-18 07:42:05,966 INFO [pretrained.py:240] device: cuda:0 + 2021-11-18 07:42:05,966 INFO [pretrained.py:242] Creating model + 2021-11-18 07:42:16,821 INFO [pretrained.py:259] Constructing Fbank computer + 2021-11-18 07:42:16,822 INFO [pretrained.py:269] Reading sound files: ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav'] + 2021-11-18 07:42:16,826 INFO [pretrained.py:275] Decoding started + 2021-11-18 07:42:16,916 INFO [pretrained.py:321] Loading HLG from ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt + 2021-11-18 07:42:21,115 INFO [pretrained.py:345] Use HLG + attention decoder rescoring + 2021-11-18 07:42:21,888 INFO [pretrained.py:370] + ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav: + 甚至 出现 交易 几乎 停止 的 情况 + + ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav: + 一二 线 城市 虽然 也 处于 调整 中 + + ./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav: + 但 因为 聚集 了 过多 公共 资源 + + + 2021-11-18 07:42:21,889 INFO [pretrained.py:372] Decoding Done + + +Colab notebook +-------------- + +We do provide a colab notebook for this recipe 
showing how to use a pre-trained model.
+
+|aishell asr conformer ctc colab notebook|
+
+.. |aishell asr conformer ctc colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/1WnG17io5HEZ0Gn_cnh_VzK5QYOoiiklC
+
+.. HINT::
+
+   Due to limited memory provided by Colab, you have to upgrade to Colab Pro to
+   run ``HLG decoding + attention decoder rescoring``.
+   Otherwise, you can only run ``HLG decoding`` with Colab.
+
+**Congratulations!** You have finished the aishell ASR recipe with
+conformer CTC models in ``icefall``.
+
+
+If you want to deploy your trained model in C++, please read the following section.
+
+Deployment with C++
+-------------------
+
+This section describes how to deploy the pre-trained model in C++, without
+Python dependencies.
+
+.. HINT::
+
+   At present, it does NOT support streaming decoding.
+
+First, let us compile k2 from source:
+
+.. code-block:: bash
+
+   $ cd $HOME
+   $ git clone https://github.com/k2-fsa/k2
+   $ cd k2
+   $ git checkout v2.0-pre
+
+.. CAUTION::
+
+   You have to switch to the branch ``v2.0-pre``!
+
+.. code-block:: bash
+
+   $ mkdir build-release
+   $ cd build-release
+   $ cmake -DCMAKE_BUILD_TYPE=Release ..
+   $ make -j hlg_decode
+
+   # You will find the binary ./bin/hlg_decode
+
+Now you are ready to go!
+
+Assume you have run:
+
+   .. code-block:: bash
+
+      $ cd k2/build-release
+      $ ln -s /path/to/icefall_asr_aishell_conformer_ctc ./
+
+To view the usage of ``./bin/hlg_decode``, run:
+
+.. code-block::
+
+   $ ./bin/hlg_decode
+
+It will show you the following message:
+
+.. code-block:: bash
+
+   Please provide --nn_model
+
+   This file implements decoding with an HLG decoding graph.
+
+   Usage:
+     ./bin/hlg_decode \
+       --use_gpu true \
+       --nn_model <path to torch scripted pt file> \
+       --hlg <path to HLG.pt> \
+       --word_table <path to words.txt> \
+       <path to foo.wav> \
+       <path to bar.wav> \
+       <more waves if any>
+
+   To see all possible options, use
+     ./bin/hlg_decode --help
+
+   Caution:
+    - Only sound files (*.wav) with single channel are supported.
+    - It assumes the model is conformer_ctc/transformer.py from icefall.
+      If you use a different model, you have to change the code
+      related to `model.forward` in this file.
+
+
+HLG decoding
+^^^^^^^^^^^^
+
+.. code-block:: bash
+
+   ./bin/hlg_decode \
+     --use_gpu true \
+     --nn_model icefall_asr_aishell_conformer_ctc/exp/cpu_jit.pt \
+     --hlg icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt \
+     --word_table icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt \
+     icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav \
+     icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav \
+     icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav
+
+The output is:
+
+..
code-block::
+
+   2021-11-18 14:48:20.89 [I] k2/torch/bin/hlg_decode.cu:115:int main(int, char**) Device: cpu
+   2021-11-18 14:48:20.89 [I] k2/torch/bin/hlg_decode.cu:124:int main(int, char**) Load wave files
+   2021-11-18 14:48:20.97 [I] k2/torch/bin/hlg_decode.cu:131:int main(int, char**) Build Fbank computer
+   2021-11-18 14:48:20.98 [I] k2/torch/bin/hlg_decode.cu:142:int main(int, char**) Compute features
+   2021-11-18 14:48:20.115 [I] k2/torch/bin/hlg_decode.cu:150:int main(int, char**) Load neural network model
+   2021-11-18 14:48:20.693 [I] k2/torch/bin/hlg_decode.cu:165:int main(int, char**) Compute nnet_output
+   2021-11-18 14:48:23.182 [I] k2/torch/bin/hlg_decode.cu:180:int main(int, char**) Load icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt
+   2021-11-18 14:48:33.489 [I] k2/torch/bin/hlg_decode.cu:185:int main(int, char**) Decoding
+   2021-11-18 14:48:45.217 [I] k2/torch/bin/hlg_decode.cu:216:int main(int, char**)
+   Decoding result:
+
+   icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav
+   甚至 出现 交易 几乎 停止 的 情况
+
+   icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav
+   一二 线 城市 虽然 也 处于 调整 中
+
+   icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav
+   但 因为 聚集 了 过多 公共 资源
+
+There is a Colab notebook showing you how to run a torch scripted model in C++.
+Please see |aishell asr conformer ctc torch script colab notebook|
+
+.. |aishell asr conformer ctc torch script colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/1Vh7RER7saTW01DtNbvr7CY7ovNZgmfWz?usp=sharing
diff --git a/_sources/recipes/aishell/index.rst.txt b/_sources/recipes/aishell/index.rst.txt
new file mode 100644
index 000000000..d072d6e9c
--- /dev/null
+++ b/_sources/recipes/aishell/index.rst.txt
@@ -0,0 +1,22 @@
+aishell
+=======
+
+Aishell is an open-source Chinese Mandarin speech corpus published by Beijing
+Shell Shell Technology Co., Ltd.
+
+400 people from different accent areas in China were invited to participate in
+the recording, which was conducted in a quiet indoor environment using
+high-fidelity microphones; the audio is downsampled to 16kHz. Thanks to
+professional speech annotation and strict quality inspection, the manual
+transcription accuracy is above 95%. The data is free for academic use. We
+hope to provide a moderate amount of data for new researchers in the field of
+speech recognition.
+
+It can be downloaded from `<https://www.openslr.org/33>`_
+
+.. toctree::
+   :maxdepth: 1
+
+   tdnn_lstm_ctc
+   conformer_ctc
+   stateless_transducer
+
diff --git a/_sources/recipes/aishell/stateless_transducer.rst.txt b/_sources/recipes/aishell/stateless_transducer.rst.txt
new file mode 100644
index 000000000..e8137b8c1
--- /dev/null
+++ b/_sources/recipes/aishell/stateless_transducer.rst.txt
@@ -0,0 +1,714 @@
+Stateless Transducer
+====================
+
+This tutorial shows you how to do transducer training in ``icefall``.
+
+.. HINT::
+
+   We use the term ``transducer`` here instead of RNN-T or RNN transducer
+   since, as you will see, there are no RNNs in the model.
+
+.. HINT::
+
+   We assume you have read the page :ref:`install icefall` and have setup
+   the environment for ``icefall``.
+
+.. HINT::
+
+   We recommend using a GPU or several GPUs to run this recipe.
+
+In this tutorial, you will learn:
+
+   - (1) What the transducer model looks like
+   - (2) How to prepare data for training and decoding
+   - (3) How to start the training, either with a single GPU or with multiple GPUs
+   - (4) How to do decoding after training, with greedy search, beam search, and **modified beam search**
+   - (5) How to use a pre-trained model provided by us to transcribe sound files
+
+
+The Model
+---------
+
+The transducer model consists of 3 parts:
+
+- **Encoder**: It is a conformer encoder with the following parameters
+
+   - Number of heads: 8
+   - Attention dim: 512
+   - Number of layers: 12
+   - Feedforward dim: 2048
+
+- **Decoder**: We use a stateless model consisting of:
+
+   - An embedding layer with embedding dim 512
+   - A Conv1d layer with a default kernel size of 2 (i.e. it sees 2
+     symbols of left-context by default)
+
+- **Joiner**: It consists of a ``tanh`` activation followed by a ``nn.Linear()``.
+
+.. Caution::
+
+   The decoder is stateless and very simple. It is borrowed from
+   `Rnn-Transducer with Stateless Prediction Network <https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=9054419>`_.
+
+   We make one modification to it: we place a Conv1d layer right after
+   the embedding layer.
+
+When Chinese characters are used as the modelling unit (the vocabulary size
+is 4336 for this specific dataset), the model has ``87939824`` parameters,
+i.e., about ``88 M``.
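+
+To make the structure concrete, here is a minimal sketch of such a stateless
+decoder and joiner in plain PyTorch. It illustrates the description above and
+is not icefall's actual code; the class names and the exact padding behaviour
+are ours:
+
+.. code-block:: python
+
+   import torch
+   import torch.nn as nn
+
+   class StatelessDecoder(nn.Module):
+       """An embedding layer followed by a Conv1d.
+
+       With kernel_size=2 the decoder sees only a fixed, finite
+       left-context of previous symbols -- there is no RNN state.
+       """
+
+       def __init__(self, vocab_size: int, embed_dim: int = 512, context_size: int = 2):
+           super().__init__()
+           self.embedding = nn.Embedding(vocab_size, embed_dim)
+           self.conv = nn.Conv1d(embed_dim, embed_dim, kernel_size=context_size)
+
+       def forward(self, y: torch.Tensor) -> torch.Tensor:
+           # y: (batch, num_symbols), token IDs
+           embed = self.embedding(y).permute(0, 2, 1)  # (batch, embed_dim, num_symbols)
+           out = self.conv(embed).permute(0, 2, 1)     # (batch, num_symbols - 1, embed_dim)
+           return out
+
+   class Joiner(nn.Module):
+       """A tanh activation followed by a Linear layer."""
+
+       def __init__(self, input_dim: int = 512, vocab_size: int = 4336):
+           super().__init__()
+           self.output_linear = nn.Linear(input_dim, vocab_size)
+
+       def forward(self, encoder_out: torch.Tensor, decoder_out: torch.Tensor) -> torch.Tensor:
+           logit = encoder_out + decoder_out
+           return self.output_linear(torch.tanh(logit))
+
+Because the decoder has no recurrent state, its output depends only on the
+last ``context_size`` symbols of a hypothesis, which is what makes the model
+"stateless".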
+
+The Loss
+--------
+
+We are using `optimized_transducer <https://github.com/csukuangfj/optimized_transducer>`_
+to compute the transducer loss, which removes extra paddings
+in the loss computation to save memory.
+
+.. Hint::
+
+   ``optimized_transducer`` implements the techniques proposed
+   in `Improving RNN Transducer Modeling for End-to-End Speech Recognition <https://arxiv.org/abs/1909.12415>`_ to save memory.
+
+   Furthermore, it supports ``modified transducer``, limiting the maximum
+   number of symbols that can be emitted per frame to 1, which simplifies
+   the decoding process significantly. Also, experimental results
+   show that it does not degrade the performance.
+
+   See the ``optimized_transducer`` documentation for what exactly
+   modified transducer is.
+
+   Benchmarks in the ``optimized_transducer`` repository show that
+   in the unpruned case ``optimized_transducer`` has the advantage of
+   minimizing memory usage.
+
+.. todo::
+
+   Add a tutorial about ``pruned_transducer_stateless`` that uses the k2
+   pruned transducer loss.
+
+.. hint::
+
+   You can use::
+
+      pip install optimized_transducer
+
+   to install ``optimized_transducer``. Refer to its repository for other
+   installation alternatives.
+
+Data Preparation
+----------------
+
+To prepare the data for training, please use the following commands:
+
+.. code-block:: bash
+
+   cd egs/aishell/ASR
+   ./prepare.sh --stop-stage 4
+   ./prepare.sh --stage 6 --stop-stage 6
+
+.. note::
+
+   You can use ``./prepare.sh``, though it will generate FSTs that
+   are not used in transducer training.
+
+When you finish running the script, you will get the following two folders:
+
+   - ``data/fbank``: It saves the pre-computed features
+   - ``data/lang_char``: It contains tokens that will be used in the training
+
+Training
+--------
+
+.. code-block:: bash
+
+   cd egs/aishell/ASR
+   ./transducer_stateless_modified/train.py --help
+
+shows you the training options that can be passed from the commandline.
+The following options are used quite often:
+
+   - ``--exp-dir``
+
+     The experiment folder to save logs and model checkpoints,
+     defaults to ``./transducer_stateless_modified/exp``.
+
+   - ``--num-epochs``
+
+     It is the number of epochs to train. For instance,
+     ``./transducer_stateless_modified/train.py --num-epochs 30`` trains for 30
+     epochs and generates ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-29.pt``
+     in the folder set by ``--exp-dir``.
+
+   - ``--start-epoch``
+
+     It's used to resume training.
+     ``./transducer_stateless_modified/train.py --start-epoch 10`` loads the
+     checkpoint from ``exp_dir/epoch-9.pt`` and starts
+     training from epoch 10, based on the state from epoch 9.
+
+   - ``--world-size``
+
+     It is used for single-machine multi-GPU DDP training.
+
+     - (a) If it is 1, then no DDP training is used.
+
+     - (b) If it is 2, then GPU 0 and GPU 1 are used for DDP training.
+
+     The following shows some use cases with it.
+
+     **Use case 1**: You have 4 GPUs, but you only want to use GPU 0 and
+     GPU 2 for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/aishell/ASR
+           $ export CUDA_VISIBLE_DEVICES="0,2"
+           $ ./transducer_stateless_modified/train.py --world-size 2
+
+     **Use case 2**: You have 4 GPUs and you want to use all of them
+     for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/aishell/ASR
+           $ ./transducer_stateless_modified/train.py --world-size 4
+
+     **Use case 3**: You have 4 GPUs but you only want to use GPU 3
+     for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/aishell/ASR
+           $ export CUDA_VISIBLE_DEVICES="3"
+           $ ./transducer_stateless_modified/train.py --world-size 1
+
+     .. CAUTION::
+
+        Only single-machine multi-GPU DDP training is implemented at present.
+        There is an ongoing PR that adds support for multi-machine multi-GPU
+        DDP training.
+
+   - ``--max-duration``
+
+     It specifies the number of seconds over all utterances in a
+     batch **before padding**.
+     If you encounter CUDA OOM, please reduce it. For instance, if
+     you are using a V100 NVIDIA GPU with 32 GB RAM, we recommend
+     setting it to ``300`` when the vocabulary size is 500.
+
+     .. HINT::
+
+        Due to padding, the number of seconds of all utterances in a
+        batch will usually be larger than ``--max-duration``.
+
+        A larger value for ``--max-duration`` may cause OOM during training,
+        while a smaller value may increase the training time. You have to
+        tune it.
+
+   - ``--lr-factor``
+
+     It controls the learning rate. If you use a single GPU for training, you
+     may want to use a small value for it. If you use multiple GPUs for training,
+     you may increase it.
+
+   - ``--context-size``
+
+     It specifies the kernel size in the decoder. The default value 2 means the
+     decoder functions like a tri-gram LM.
+
+   - ``--modified-transducer-prob``
+
+     It specifies the probability of using the modified transducer loss.
+     If it is 0, then no modified transducer is used; if it is 1,
+     then it uses modified transducer loss for all batches. If it is
+     ``p``, it applies modified transducer with probability ``p``.
+
+There are some training options, e.g.,
+number of warmup steps,
+that are not passed from the commandline.
+They are pre-configured by the function ``get_params()`` in
+``transducer_stateless_modified/train.py``.
+
+If you need to change them, please modify ``./transducer_stateless_modified/train.py`` directly.
+
+.. CAUTION::
+
+   The training set is perturbed by speed with two factors: 0.9 and 1.1.
+   Each epoch actually processes ``3x150 == 450`` hours of data.
+
+Training logs
+~~~~~~~~~~~~~
+
+Training logs and checkpoints are saved in the folder set by ``--exp-dir``
+(defaults to ``transducer_stateless_modified/exp``).
+You will find the following files in that directory:
+
+   - ``epoch-0.pt``, ``epoch-1.pt``, ...
+
+     These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``.
+     To resume training from some checkpoint, say ``epoch-10.pt``, you can use:
+
+        .. code-block:: bash
+
+           $ ./transducer_stateless_modified/train.py --start-epoch 11
+
+   - ``tensorboard/``
+
+     This folder contains TensorBoard logs. Training loss, validation loss, learning
+     rate, etc, are recorded in these logs. You can visualize them by:
+
+        .. code-block:: bash
+
+           $ cd transducer_stateless_modified/exp/tensorboard
+           $ tensorboard dev upload --logdir . --name "Aishell transducer training with icefall" --description "Training modified transducer, see https://github.com/k2-fsa/icefall/pull/219"
+
+     It will print something like below:
+
+        .. code-block::
+
+           TensorFlow installation not found - running with reduced feature set.
+           Upload started and will continue reading any new data as it's added to the logdir.
+
+           To stop uploading, press Ctrl-C.
+
+           New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/laGZ6HrcQxOigbFD5E0Y3Q/
+
+           [2022-03-03T14:29:45] Started scanning logdir.
+           [2022-03-03T14:29:48] Total uploaded: 8477 scalars, 0 tensors, 0 binary objects
+           Listening for new data in logdir...
+
+     Note there is a URL in the above output; click it and you will see the
+     following screenshot:
+
+        .. figure:: images/aishell-transducer_stateless_modified-tensorboard-log.png
+           :width: 600
+           :alt: TensorBoard screenshot
+           :align: center
+           :target: https://tensorboard.dev/experiment/laGZ6HrcQxOigbFD5E0Y3Q
+
+           TensorBoard screenshot.
+
+   - ``log/log-train-xxxx``
+
+     It is the detailed training log in text format, same as the one
+     you saw printed to the console during training.
+
+Usage examples
+~~~~~~~~~~~~~~
+
+The following shows typical use cases:
+
+**Case 1**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./transducer_stateless_modified/train.py --max-duration 250
+
+It uses ``--max-duration`` of 250 to avoid OOM.
+
+
+**Case 2**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ export CUDA_VISIBLE_DEVICES="0,3"
+   $ ./transducer_stateless_modified/train.py --world-size 2
+
+It uses GPU 0 and GPU 3 for DDP training.
+
+**Case 3**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./transducer_stateless_modified/train.py --num-epochs 10 --start-epoch 3
+
+It loads checkpoint ``./transducer_stateless_modified/exp/epoch-2.pt`` and starts
+training from epoch 3. Also, it trains for 10 epochs.
+
+Decoding
+--------
+
+The decoding part uses checkpoints saved by the training part, so you have
+to run the training part first.
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./transducer_stateless_modified/decode.py --help
+
+shows the options for decoding.
+
+The commonly used options are:
+
+   - ``--method``
+
+     This specifies the decoding method. Currently, it supports:
+
+     - **greedy_search**. You can provide the commandline option ``--max-sym-per-frame``
+       to limit the maximum number of symbols that can be emitted per frame.
+
+     - **beam_search**. You can provide the commandline option ``--beam-size``.
+
+     - **modified_beam_search**. You can also provide the commandline option ``--beam-size``.
+       To use this method, we assume that you have trained your model with modified transducer,
+       i.e., used the option ``--modified-transducer-prob`` in the training.
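+
+     Before looking at the exact commands, here is a minimal sketch of the
+     greedy search loop, using the hypothetical ``StatelessDecoder`` and
+     ``Joiner`` modules sketched earlier in this tutorial; ``max_sym_per_frame``
+     corresponds to the ``--max-sym-per-frame`` option:
+
+     .. code-block:: python
+
+        import torch
+
+        def greedy_search(encoder_out, decoder, joiner, blank_id=0,
+                          max_sym_per_frame=1, context_size=2):
+            """Decode one utterance. encoder_out: (num_frames, encoder_dim).
+
+            Assumes encoder_dim == decoder embedding dim (both 512 here).
+            """
+            hyp = [blank_id] * context_size  # left-context for the stateless decoder
+            for t in range(encoder_out.size(0)):
+                # Cap the number of symbols emitted for this frame.
+                for _ in range(max_sym_per_frame):
+                    context = torch.tensor([hyp[-context_size:]])  # (1, context_size)
+                    decoder_out = decoder(context).squeeze()       # (embed_dim,)
+                    logits = joiner(encoder_out[t], decoder_out)   # (vocab_size,)
+                    y = logits.argmax().item()
+                    if y == blank_id:
+                        break          # blank: move on to the next frame
+                    hyp.append(y)      # non-blank: extend the hypothesis
+            return hyp[context_size:]
+
+     With ``max_sym_per_frame = 1`` this is exactly the ``modified`` setting:
+     at most one symbol is emitted per frame.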
+
+     The following command uses greedy search for decoding:
+
+     .. code-block::
+
+        $ cd egs/aishell/ASR
+        $ ./transducer_stateless_modified/decode.py \
+           --epoch 64 \
+           --avg 33 \
+           --exp-dir ./transducer_stateless_modified/exp \
+           --max-duration 100 \
+           --decoding-method greedy_search \
+           --max-sym-per-frame 1
+
+     The following command uses beam search for decoding:
+
+     .. code-block::
+
+        $ cd egs/aishell/ASR
+        $ ./transducer_stateless_modified/decode.py \
+           --epoch 64 \
+           --avg 33 \
+           --exp-dir ./transducer_stateless_modified/exp \
+           --max-duration 100 \
+           --decoding-method beam_search \
+           --beam-size 4
+
+     The following command uses ``modified`` beam search for decoding:
+
+     .. code-block::
+
+        $ cd egs/aishell/ASR
+        $ ./transducer_stateless_modified/decode.py \
+           --epoch 64 \
+           --avg 33 \
+           --exp-dir ./transducer_stateless_modified/exp \
+           --max-duration 100 \
+           --decoding-method modified_beam_search \
+           --beam-size 4
+
+   - ``--max-duration``
+
+     It has the same meaning as the one used in training. A larger
+     value may cause OOM.
+
+   - ``--epoch``
+
+     It specifies the epoch of the checkpoint to be used for decoding.
+
+   - ``--avg``
+
+     It specifies the number of models to average. For instance, if it is 3 and if
+     ``--epoch=10``, then it averages the checkpoints ``epoch-8.pt``, ``epoch-9.pt``,
+     and ``epoch-10.pt``, and the averaged checkpoint is used for decoding, as
+     illustrated by the sketch below.
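+
+The averaging is, conceptually, just an element-wise mean over the saved
+``state_dict`` values. A simplified sketch follows; icefall's real
+``average_checkpoints()`` helper does the same thing with more care, and we
+assume each ``epoch-xx.pt`` stores the model under the ``"model"`` key, which
+you can verify with ``torch.load(...).keys()``:
+
+.. code-block:: python
+
+   import torch
+
+   def average_checkpoints(filenames):
+       """Element-wise average of the model state_dicts in `filenames`."""
+       avg = torch.load(filenames[0], map_location="cpu")["model"]
+       for name in filenames[1:]:
+           state = torch.load(name, map_location="cpu")["model"]
+           for k in avg:
+               avg[k] = avg[k] + state[k]
+       for k in avg:
+           if avg[k].is_floating_point():
+               avg[k] = avg[k] / len(filenames)
+           else:
+               # Integer buffers are floor-divided.
+               avg[k] = torch.div(avg[k], len(filenames), rounding_mode="floor")
+       return avg
+
+   # --epoch 10 --avg 3 corresponds to:
+   # avg = average_checkpoints([f"exp/epoch-{i}.pt" for i in range(8, 11)])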
+
+After decoding, you can find the decoding logs and results in ``exp_dir/log/``, e.g.,
+``exp_dir/log/greedy_search``.
+
+Pre-trained Model
+-----------------
+
+We have uploaded a pre-trained model to
+`<https://huggingface.co/csukuangfj/icefall-aishell-transducer-stateless-modified-2022-03-01>`_
+
+We describe how to use the pre-trained model to transcribe a sound file or
+multiple sound files in the following.
+
+Install kaldifeat
+~~~~~~~~~~~~~~~~~
+
+`kaldifeat <https://github.com/csukuangfj/kaldifeat>`_ is used to
+extract features for a single sound file or multiple sound files
+at the same time.
+
+Please refer to `<https://github.com/csukuangfj/kaldifeat>`_ for installation.
+
+Download the pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following commands describe how to download the pre-trained model:
+
+.. code-block::
+
+   $ cd egs/aishell/ASR
+   $ mkdir tmp
+   $ cd tmp
+   $ git lfs install
+   $ git clone https://huggingface.co/csukuangfj/icefall-aishell-transducer-stateless-modified-2022-03-01
+
+
+.. CAUTION::
+
+   You have to use ``git lfs`` to download the pre-trained model.
+
+After downloading, you will have the following files:
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ tree tmp/icefall-aishell-transducer-stateless-modified-2022-03-01
+
+
+.. code-block:: bash
+
+   tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/
+   |-- README.md
+   |-- data
+   |   `-- lang_char
+   |       |-- L.pt
+   |       |-- lexicon.txt
+   |       |-- tokens.txt
+   |       `-- words.txt
+   |-- exp
+   |   `-- pretrained.pt
+   |-- log
+   |   |-- errs-test-beam_4-epoch-64-avg-33-beam-4.txt
+   |   |-- errs-test-greedy_search-epoch-64-avg-33-context-2-max-sym-per-frame-1.txt
+   |   |-- log-decode-epoch-64-avg-33-beam-4-2022-03-02-12-05-03
+   |   |-- log-decode-epoch-64-avg-33-context-2-max-sym-per-frame-1-2022-02-28-18-13-07
+   |   |-- recogs-test-beam_4-epoch-64-avg-33-beam-4.txt
+   |   `-- recogs-test-greedy_search-epoch-64-avg-33-context-2-max-sym-per-frame-1.txt
+   `-- test_wavs
+       |-- BAC009S0764W0121.wav
+       |-- BAC009S0764W0122.wav
+       |-- BAC009S0764W0123.wav
+       `-- transcript.txt
+
+   5 directories, 16 files
+
+
+**File descriptions**:
+
+   - ``data/lang_char``
+
+     It contains language related files. You can find the vocabulary size in ``tokens.txt``.
+ + - ``exp/pretrained.pt`` + + It contains pre-trained model parameters, obtained by averaging + checkpoints from ``epoch-32.pt`` to ``epoch-64.pt``. + Note: We have removed optimizer ``state_dict`` to reduce file size. + + - ``log`` + + It contains decoding logs and decoded results. + + - ``test_wavs`` + + It contains some test sound files from Aishell ``test`` dataset. + +The information of the test sound files is listed below: + +.. code-block:: bash + + $ soxi tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/*.wav + + Input File : 'tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:04.20 = 67263 samples ~ 315.295 CDDA sectors + File Size : 135k + Bit Rate : 256k + Sample Encoding: 16-bit Signed Integer PCM + + + Input File : 'tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:04.12 = 65840 samples ~ 308.625 CDDA sectors + File Size : 132k + Bit Rate : 256k + Sample Encoding: 16-bit Signed Integer PCM + + + Input File : 'tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:04.00 = 64000 samples ~ 300 CDDA sectors + File Size : 128k + Bit Rate : 256k + Sample Encoding: 16-bit Signed Integer PCM + + Total Duration of 3 files: 00:00:12.32 + +Usage +~~~~~ + +.. code-block:: + + $ cd egs/aishell/ASR + $ ./transducer_stateless_modified/pretrained.py --help + +displays the help information. + +It supports three decoding methods: + + - greedy search + - beam search + - modified beam search + +.. note:: + + In modified beam search, it limits the maximum number of symbols that can be + emitted per frame to 1. To use this method, you have to ensure that your model + has been trained with the option ``--modified-transducer-prob``. Otherwise, + it may give you poor results. + +Greedy search +^^^^^^^^^^^^^ + +The command to run greedy search is given below: + +.. code-block:: bash + + + $ cd egs/aishell/ASR + $ ./transducer_stateless_modified/pretrained.py \ + --checkpoint ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/exp/pretrained.pt \ + --lang-dir ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char \ + --method greedy_search \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav + +The output is as follows: + +.. 
code-block:: + + 2022-03-03 15:35:26,531 INFO [pretrained.py:239] device: cuda:0 + 2022-03-03 15:35:26,994 INFO [lexicon.py:176] Loading pre-compiled tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char/Linv.pt + 2022-03-03 15:35:27,027 INFO [pretrained.py:246] {'feature_dim': 80, 'encoder_out_dim': 512, 'subsampling_factor': 4, 'attention_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'vgg_frontend': False, 'env_info': {'k2-version': '1.13', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f4fefe4882bc0ae59af951da3f47335d5495ef71', 'k2-git-date': 'Thu Feb 10 15:16:02 2022', 'lhotse-version': '1.0.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'master', 'icefall-git-sha1': '50d2281-clean', 'icefall-git-date': 'Wed Mar 2 16:02:38 2022', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-aishell', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-multi-datasets/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-aishell/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-2-0815224919-75d558775b-mmnv8', 'IP address': '10.177.72.138'}, 'sample_rate': 16000, 'checkpoint': './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/exp/pretrained.pt', 'lang_dir': PosixPath('tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char'), 'method': 'greedy_search', 'sound_files': ['./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'], 'beam_size': 4, 'context_size': 2, 'max_sym_per_frame': 3, 'blank_id': 0, 'vocab_size': 4336} + 2022-03-03 15:35:27,027 INFO [pretrained.py:248] About to create model + 2022-03-03 15:35:36,878 INFO [pretrained.py:257] Constructing Fbank computer + 2022-03-03 15:35:36,880 INFO [pretrained.py:267] Reading sound files: ['./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'] + 2022-03-03 15:35:36,891 INFO [pretrained.py:273] Decoding started + /ceph-fj/fangjun/open-source-2/icefall-aishell/egs/aishell/ASR/transducer_stateless_modified/conformer.py:113: UserWarning: __floordiv__ is deprecated, and its behavior will change in a future version of pytorch. It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values. To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). 
+ lengths = ((x_lens - 1) // 2 - 1) // 2 + 2022-03-03 15:35:37,163 INFO [pretrained.py:320] + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav: + 甚 至 出 现 交 易 几 乎 停 滞 的 情 况 + + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav: + 一 二 线 城 市 虽 然 也 处 于 调 整 中 + + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav: + 但 因 为 聚 集 了 过 多 公 共 资 源 + + 2022-03-03 15:35:37,163 INFO [pretrained.py:322] Decoding Done + +Beam search +^^^^^^^^^^^ + +The command to run beam search is given below: + +.. code-block:: bash + + + $ cd egs/aishell/ASR + + $ ./transducer_stateless_modified/pretrained.py \ + --checkpoint ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/exp/pretrained.pt \ + --lang-dir ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char \ + --method beam_search \ + --beam-size 4 \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav + +The output is as follows: + +.. code-block:: + + 2022-03-03 15:39:09,285 INFO [pretrained.py:239] device: cuda:0 + 2022-03-03 15:39:09,708 INFO [lexicon.py:176] Loading pre-compiled tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char/Linv.pt + 2022-03-03 15:39:09,759 INFO [pretrained.py:246] {'feature_dim': 80, 'encoder_out_dim': 512, 'subsampling_factor': 4, 'attention_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'vgg_frontend': False, 'env_info': {'k2-version': '1.13', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f4fefe4882bc0ae59af951da3f47335d5495ef71', 'k2-git-date': 'Thu Feb 10 15:16:02 2022', 'lhotse-version': '1.0.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'master', 'icefall-git-sha1': '50d2281-clean', 'icefall-git-date': 'Wed Mar 2 16:02:38 2022', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-aishell', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-multi-datasets/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-aishell/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-2-0815224919-75d558775b-mmnv8', 'IP address': '10.177.72.138'}, 'sample_rate': 16000, 'checkpoint': './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/exp/pretrained.pt', 'lang_dir': PosixPath('tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char'), 'method': 'beam_search', 'sound_files': ['./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'], 'beam_size': 4, 'context_size': 2, 'max_sym_per_frame': 3, 'blank_id': 0, 'vocab_size': 4336} + 2022-03-03 15:39:09,760 INFO [pretrained.py:248] About to create model + 2022-03-03 15:39:18,919 INFO [pretrained.py:257] Constructing Fbank computer + 2022-03-03 15:39:18,922 INFO [pretrained.py:267] Reading sound files: ['./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav', 
'./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'] + 2022-03-03 15:39:18,929 INFO [pretrained.py:273] Decoding started + /ceph-fj/fangjun/open-source-2/icefall-aishell/egs/aishell/ASR/transducer_stateless_modified/conformer.py:113: UserWarning: __floordiv__ is deprecated, and its behavior will change in a future version of pytorch. It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values. To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). + lengths = ((x_lens - 1) // 2 - 1) // 2 + 2022-03-03 15:39:21,046 INFO [pretrained.py:320] + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav: + 甚 至 出 现 交 易 几 乎 停 滞 的 情 况 + + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav: + 一 二 线 城 市 虽 然 也 处 于 调 整 中 + + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav: + 但 因 为 聚 集 了 过 多 公 共 资 源 + + 2022-03-03 15:39:21,047 INFO [pretrained.py:322] Decoding Done + +Modified Beam search +^^^^^^^^^^^^^^^^^^^^ + +The command to run modified beam search is given below: + +.. code-block:: bash + + + $ cd egs/aishell/ASR + + $ ./transducer_stateless_modified/pretrained.py \ + --checkpoint ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/exp/pretrained.pt \ + --lang-dir ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char \ + --method modified_beam_search \ + --beam-size 4 \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav \ + ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav + +The output is as follows: + +.. 
code-block:: + + 2022-03-03 15:41:23,319 INFO [pretrained.py:239] device: cuda:0 + 2022-03-03 15:41:23,798 INFO [lexicon.py:176] Loading pre-compiled tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char/Linv.pt + 2022-03-03 15:41:23,831 INFO [pretrained.py:246] {'feature_dim': 80, 'encoder_out_dim': 512, 'subsampling_factor': 4, 'attention_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'vgg_frontend': False, 'env_info': {'k2-version': '1.13', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f4fefe4882bc0ae59af951da3f47335d5495ef71', 'k2-git-date': 'Thu Feb 10 15:16:02 2022', 'lhotse-version': '1.0.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'master', 'icefall-git-sha1': '50d2281-clean', 'icefall-git-date': 'Wed Mar 2 16:02:38 2022', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-aishell', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-multi-datasets/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-aishell/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-2-0815224919-75d558775b-mmnv8', 'IP address': '10.177.72.138'}, 'sample_rate': 16000, 'checkpoint': './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/exp/pretrained.pt', 'lang_dir': PosixPath('tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/data/lang_char'), 'method': 'modified_beam_search', 'sound_files': ['./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'], 'beam_size': 4, 'context_size': 2, 'max_sym_per_frame': 3, 'blank_id': 0, 'vocab_size': 4336} + 2022-03-03 15:41:23,831 INFO [pretrained.py:248] About to create model + 2022-03-03 15:41:32,214 INFO [pretrained.py:257] Constructing Fbank computer + 2022-03-03 15:41:32,215 INFO [pretrained.py:267] Reading sound files: ['./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav', './tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'] + 2022-03-03 15:41:32,220 INFO [pretrained.py:273] Decoding started + /ceph-fj/fangjun/open-source-2/icefall-aishell/egs/aishell/ASR/transducer_stateless_modified/conformer.py:113: UserWarning: __floordiv__ is deprecated, and its behavior will change in a future version of pytorch. It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values. To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). + lengths = ((x_lens - 1) // 2 - 1) // 2 + /ceph-fj/fangjun/open-source-2/icefall-aishell/egs/aishell/ASR/transducer_stateless_modified/beam_search.py:402: UserWarning: __floordiv__ is deprecated, and its behavior will change in a future version of pytorch. It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values. To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). 
+   topk_hyp_indexes = topk_indexes // logits.size(-1)
+   2022-03-03 15:41:32,583 INFO [pretrained.py:320]
+   ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav:
+   甚 至 出 现 交 易 几 乎 停 滞 的 情 况
+
+   ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav:
+   一 二 线 城 市 虽 然 也 处 于 调 整 中
+
+   ./tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav:
+   但 因 为 聚 集 了 过 多 公 共 资 源
+
+   2022-03-03 15:41:32,583 INFO [pretrained.py:322] Decoding Done
+
+Colab notebook
+--------------
+
+We provide a colab notebook for this recipe showing how to use a pre-trained model to
+transcribe sound files.
+
+|aishell asr stateless modified transducer colab notebook|
+
+.. |aishell asr stateless modified transducer colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/12jpTxJB44vzwtcmJl2DTdznW0OawPb9H?usp=sharing
diff --git a/_sources/recipes/aishell/tdnn_lstm_ctc.rst.txt b/_sources/recipes/aishell/tdnn_lstm_ctc.rst.txt
new file mode 100644
index 000000000..e9b0ea656
--- /dev/null
+++ b/_sources/recipes/aishell/tdnn_lstm_ctc.rst.txt
@@ -0,0 +1,504 @@
+TDNN-LSTM CTC
+=============
+
+This tutorial shows you how to run a TDNN-LSTM CTC model
+with the `Aishell <https://www.openslr.org/33>`_ dataset.
+
+
+.. HINT::
+
+   We assume you have read the page :ref:`install icefall` and have setup
+   the environment for ``icefall``.
+
+.. HINT::
+
+   We recommend using a GPU or several GPUs to run this recipe.
+
+In this tutorial, you will learn:
+
+   - (1) How to prepare data for training and decoding
+   - (2) How to start the training, either with a single GPU or multiple GPUs
+   - (3) How to do decoding after training
+   - (4) How to use a pre-trained model provided by us
+
+Data preparation
+----------------
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./prepare.sh
+
+The script ``./prepare.sh`` handles the data preparation for you, **automagically**.
+All you need to do is to run it.
+
+The data preparation contains several stages; you can use the following two
+options:
+
+   - ``--stage``
+   - ``--stop-stage``
+
+to control which stage(s) should be run. By default, all stages are executed.
+
+
+For example,
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./prepare.sh --stage 0 --stop-stage 0
+
+means to run only stage 0.
+
+To run stage 2 to stage 5, use:
+
+.. code-block:: bash
+
+   $ ./prepare.sh --stage 2 --stop-stage 5
+
+.. HINT::
+
+   If you have pre-downloaded the `Aishell <https://www.openslr.org/33>`_
+   dataset and the `musan <http://www.openslr.org/17/>`_ dataset, say,
+   they are saved in ``/tmp/aishell`` and ``/tmp/musan``, you can modify
+   the ``dl_dir`` variable in ``./prepare.sh`` to point to ``/tmp`` so that
+   ``./prepare.sh`` won't re-download them.
+
+.. HINT::
+
+   A 3-gram language model will be downloaded from HuggingFace. We assume you
+   have installed and initialized ``git-lfs``; if not, you can install it by
+
+      .. code-block:: bash
+
+         $ sudo apt-get install git-lfs
+         $ git-lfs install
+
+   If you don't have ``sudo`` permission, you can download a
+   `git-lfs binary <https://github.com/git-lfs/git-lfs/releases>`_ and add it
+   to your ``PATH``.
+
+.. NOTE::
+
+   All generated files by ``./prepare.sh``, e.g., features, lexicon, etc,
+   are saved in ``./data`` directory.
+
+
+Training
+--------
+
+Configurable options
+~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./tdnn_lstm_ctc/train.py --help
+
+shows you the training options that can be passed from the commandline.
+The following options are used quite often:
+
+
+   - ``--num-epochs``
+
+     It is the number of epochs to train. For instance,
+     ``./tdnn_lstm_ctc/train.py --num-epochs 30`` trains for 30 epochs
+     and generates ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-29.pt``
+     in the folder ``./tdnn_lstm_ctc/exp``.
+
+   - ``--start-epoch``
+
+     It's used to resume training.
+     ``./tdnn_lstm_ctc/train.py --start-epoch 10`` loads the
+     checkpoint ``./tdnn_lstm_ctc/exp/epoch-9.pt`` and starts
+     training from epoch 10, based on the state from epoch 9.
+
+   - ``--world-size``
+
+     It is used for multi-GPU single-machine DDP training.
+
+     - (a) If it is 1, then no DDP training is used.
+
+     - (b) If it is 2, then GPU 0 and GPU 1 are used for DDP training.
+
+     The following shows some use cases with it.
+
+     **Use case 1**: You have 4 GPUs, but you only want to use GPU 0 and
+     GPU 2 for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/aishell/ASR
+           $ export CUDA_VISIBLE_DEVICES="0,2"
+           $ ./tdnn_lstm_ctc/train.py --world-size 2
+
+     **Use case 2**: You have 4 GPUs and you want to use all of them
+     for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/aishell/ASR
+           $ ./tdnn_lstm_ctc/train.py --world-size 4
+
+     **Use case 3**: You have 4 GPUs but you only want to use GPU 3
+     for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/aishell/ASR
+           $ export CUDA_VISIBLE_DEVICES="3"
+           $ ./tdnn_lstm_ctc/train.py --world-size 1
+
+     .. CAUTION::
+
+        Only multi-GPU single-machine DDP training is implemented at present.
+        Multi-GPU multi-machine DDP training will be added later.
+
+   - ``--max-duration``
+
+     It specifies the number of seconds over all utterances in a
+     batch, before **padding**.
+     If you encounter CUDA OOM, please reduce it. For instance, if
+     you are using a V100 NVIDIA GPU, we recommend setting it to ``2000``.
+
+     .. HINT::
+
+        Due to padding, the number of seconds of all utterances in a
+        batch will usually be larger than ``--max-duration``.
+
+        A larger value for ``--max-duration`` may cause OOM during training,
+        while a smaller value may increase the training time. You have to
+        tune it.
+
+
+Pre-configured options
+~~~~~~~~~~~~~~~~~~~~~~
+
+There are some training options, e.g., weight decay,
+number of warmup steps, results dir, etc,
+that are not passed from the commandline.
+They are pre-configured by the function ``get_params()`` in
+``tdnn_lstm_ctc/train.py``.
+
+You don't need to change these pre-configured parameters. If you really need to change
+them, please modify ``./tdnn_lstm_ctc/train.py`` directly.
+
+
+.. CAUTION::
+
+   The training set is perturbed by speed with two factors: 0.9 and 1.1.
+   Each epoch actually processes ``3x150 == 450`` hours of data.
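+
+The perturbation happens during data preparation, at the lhotse ``CutSet``
+level. A minimal sketch of the idea (``cuts`` is assumed to be the training
+``CutSet``; the recipe's feature-preparation scripts follow this pattern,
+though not this exact code):
+
+.. code-block:: python
+
+   from lhotse import CutSet
+
+   def triple_with_speed_perturbation(cuts: CutSet) -> CutSet:
+       # Original data plus two speed-perturbed copies: 3 x 150 == 450 hours.
+       return cuts + cuts.perturb_speed(0.9) + cuts.perturb_speed(1.1)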
+
+Training logs
+~~~~~~~~~~~~~
+
+Training logs and checkpoints are saved in ``tdnn_lstm_ctc/exp``.
+You will find the following files in that directory:
+
+   - ``epoch-0.pt``, ``epoch-1.pt``, ...
+
+     These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``.
+     To resume training from some checkpoint, say ``epoch-10.pt``, you can use:
+
+        .. code-block:: bash
+
+           $ ./tdnn_lstm_ctc/train.py --start-epoch 11
+
+   - ``tensorboard/``
+
+     This folder contains TensorBoard logs. Training loss, validation loss, learning
+     rate, etc, are recorded in these logs. You can visualize them by:
+
+        .. code-block:: bash
+
+           $ cd tdnn_lstm_ctc/exp/tensorboard
+           $ tensorboard dev upload --logdir . --description "TDNN-LSTM CTC training for Aishell with icefall"
+
+     It will print something like below:
+
+        .. code-block::
+
+           TensorFlow installation not found - running with reduced feature set.
+           Upload started and will continue reading any new data as it's added to the logdir.
+
+           To stop uploading, press Ctrl-C.
+
+           New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/LJI9MWUORLOw3jkdhxwk8A/
+
+           [2021-09-13T11:59:23] Started scanning logdir.
+           [2021-09-13T11:59:24] Total uploaded: 4454 scalars, 0 tensors, 0 binary objects
+           Listening for new data in logdir...
+
+     Note there is a URL in the above output; click it and you will see
+     the following screenshot:
+
+        .. figure:: images/aishell-tdnn-lstm-ctc-tensorboard-log.jpg
+           :width: 600
+           :alt: TensorBoard screenshot
+           :align: center
+           :target: https://tensorboard.dev/experiment/LJI9MWUORLOw3jkdhxwk8A/
+
+           TensorBoard screenshot.
+
+   - ``log/log-train-xxxx``
+
+     It is the detailed training log in text format, same as the one
+     you saw printed to the console during training.
+
+Usage examples
+~~~~~~~~~~~~~~
+
+The following shows typical use cases:
+
+**Case 1**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ export CUDA_VISIBLE_DEVICES="0,3"
+   $ ./tdnn_lstm_ctc/train.py --world-size 2
+
+It uses GPU 0 and GPU 3 for DDP training.
+
+**Case 2**
+^^^^^^^^^^
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./tdnn_lstm_ctc/train.py --num-epochs 10 --start-epoch 3
+
+It loads checkpoint ``./tdnn_lstm_ctc/exp/epoch-2.pt`` and starts
+training from epoch 3. Also, it trains for 10 epochs.
+
+Decoding
+--------
+
+The decoding part uses checkpoints saved by the training part, so you have
+to run the training part first.
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./tdnn_lstm_ctc/decode.py --help
+
+shows the options for decoding.
+
+The commonly used options are:
+
+   - ``--method``
+
+     This specifies the decoding method.
+
+     For example, the following command uses ``1best`` decoding, i.e., it takes
+     the best path of the decoding lattice as the decoding result:
+
+        .. code-block::
+
+           $ cd egs/aishell/ASR
+           $ ./tdnn_lstm_ctc/decode.py --method 1best --max-duration 100
+
+   - ``--max-duration``
+
+     It has the same meaning as the one during training. A larger
+     value may cause OOM.
+
+Pre-trained Model
+-----------------
+
+We have uploaded a pre-trained model to
+`<https://huggingface.co/pkufool/icefall_asr_aishell_tdnn_lstm_ctc>`_.
+
+We describe how to use the pre-trained model to transcribe a sound file or
+multiple sound files in the following.
+
+Install kaldifeat
+~~~~~~~~~~~~~~~~~
+
+`kaldifeat <https://github.com/csukuangfj/kaldifeat>`_ is used to
+extract features for a single sound file or multiple sound files
+at the same time.
+
+Please refer to `<https://github.com/csukuangfj/kaldifeat>`_ for installation.
+
+Download the pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following commands describe how to download the pre-trained model:
+
+.. code-block::
+
+   $ cd egs/aishell/ASR
+   $ mkdir tmp
+   $ cd tmp
+   $ git lfs install
+   $ git clone https://huggingface.co/pkufool/icefall_asr_aishell_tdnn_lstm_ctc
+
+.. CAUTION::
+
+   You have to use ``git lfs`` to download the pre-trained model.
+
+.. CAUTION::
+
+   In order to use this pre-trained model, your k2 version has to be v1.7 or later.
+
+After downloading, you will have the following files:
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ tree tmp
+
+..
code-block:: bash
+
+   tmp/
+   `-- icefall_asr_aishell_tdnn_lstm_ctc
+       |-- README.md
+       |-- data
+       |   `-- lang_phone
+       |       |-- HLG.pt
+       |       |-- tokens.txt
+       |       `-- words.txt
+       |-- exp
+       |   `-- pretrained.pt
+       `-- test_waves
+           |-- BAC009S0764W0121.wav
+           |-- BAC009S0764W0122.wav
+           |-- BAC009S0764W0123.wav
+           `-- trans.txt
+
+   5 directories, 9 files
+
+**File descriptions**:
+
+   - ``data/lang_phone/HLG.pt``
+
+     It is the decoding graph (see the sketch after this list for how it is loaded).
+
+   - ``data/lang_phone/tokens.txt``
+
+     It contains tokens and their IDs.
+     Provided only for convenience so that you can look up the SOS/EOS ID easily.
+
+   - ``data/lang_phone/words.txt``
+
+     It contains words and their IDs.
+
+   - ``exp/pretrained.pt``
+
+     It contains pre-trained model parameters, obtained by averaging
+     checkpoints from ``epoch-18.pt`` to ``epoch-40.pt``.
+     Note: We have removed optimizer ``state_dict`` to reduce file size.
+
+   - ``test_waves/*.wav``
+
+     It contains some test sound files from Aishell ``test`` dataset.
+
+   - ``test_waves/trans.txt``
+
+     It contains the reference transcripts for the sound files in ``test_waves/``.
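+
+``HLG.pt`` is a k2 FSA serialized with ``torch.save``. A minimal sketch of
+loading it in Python, mirroring the pattern used by icefall's decoding
+scripts (the path is the one from the listing above):
+
+.. code-block:: python
+
+   import k2
+   import torch
+
+   # HLG.pt stores the FSA as a dict of tensors.
+   d = torch.load(
+       "tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/HLG.pt",
+       map_location="cpu",
+   )
+   HLG = k2.Fsa.from_dict(d)  # ready to be moved to GPU and used for decoding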
+
+The information of the test sound files is listed below:
+
+.. code-block:: bash
+
+   $ soxi tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/*.wav
+
+   Input File     : 'tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav'
+   Channels       : 1
+   Sample Rate    : 16000
+   Precision      : 16-bit
+   Duration       : 00:00:04.20 = 67263 samples ~ 315.295 CDDA sectors
+   File Size      : 135k
+   Bit Rate       : 256k
+   Sample Encoding: 16-bit Signed Integer PCM
+
+
+   Input File     : 'tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav'
+   Channels       : 1
+   Sample Rate    : 16000
+   Precision      : 16-bit
+   Duration       : 00:00:04.12 = 65840 samples ~ 308.625 CDDA sectors
+   File Size      : 132k
+   Bit Rate       : 256k
+   Sample Encoding: 16-bit Signed Integer PCM
+
+
+   Input File     : 'tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav'
+   Channels       : 1
+   Sample Rate    : 16000
+   Precision      : 16-bit
+   Duration       : 00:00:04.00 = 64000 samples ~ 300 CDDA sectors
+   File Size      : 128k
+   Bit Rate       : 256k
+   Sample Encoding: 16-bit Signed Integer PCM
+
+   Total Duration of 3 files: 00:00:12.32
+
+Usage
+~~~~~
+
+.. code-block::
+
+   $ cd egs/aishell/ASR
+   $ ./tdnn_lstm_ctc/pretrained.py --help
+
+displays the help information.
+
+
+HLG decoding
+^^^^^^^^^^^^
+
+HLG decoding uses the best path of the decoding lattice as the decoding result.
+
+The command to run HLG decoding is:
+
+.. code-block:: bash
+
+   $ cd egs/aishell/ASR
+   $ ./tdnn_lstm_ctc/pretrained.py \
+      --checkpoint ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/exp/pretrained.pt \
+      --words-file ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/words.txt \
+      --HLG ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/HLG.pt \
+      --method 1best \
+      ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav \
+      ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav \
+      ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav
+
+The output is given below:
+
+.. code-block::
+
+   2021-09-13 15:00:55,858 INFO [pretrained.py:140] device: cuda:0
+   2021-09-13 15:00:55,858 INFO [pretrained.py:142] Creating model
+   2021-09-13 15:01:05,389 INFO [pretrained.py:154] Loading HLG from ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/HLG.pt
+   2021-09-13 15:01:06,531 INFO [pretrained.py:161] Constructing Fbank computer
+   2021-09-13 15:01:06,536 INFO [pretrained.py:171] Reading sound files: ['./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav']
+   2021-09-13 15:01:06,539 INFO [pretrained.py:177] Decoding started
+   2021-09-13 15:01:06,917 INFO [pretrained.py:207] Use HLG decoding
+   2021-09-13 15:01:07,129 INFO [pretrained.py:220]
+   ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav:
+   甚至 出现 交易 几乎 停滞 的 情况
+
+   ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav:
+   一二 线 城市 虽然 也 处于 调整 中
+
+   ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav:
+   但 因为 聚集 了 过多 公共 资源
+
+
+   2021-09-13 15:01:07,129 INFO [pretrained.py:222] Decoding Done
+
+
+Colab notebook
+--------------
+
+We provide a colab notebook for this recipe showing how to use a pre-trained model.
+
+|aishell asr tdnn lstm ctc colab notebook|
+
+.. |aishell asr tdnn lstm ctc colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/1qULaGvXq7PCu_P61oubfz9b53JzY4H3z
+
+**Congratulations!** You have finished the aishell ASR recipe with
+TDNN-LSTM CTC models in ``icefall``.
diff --git a/_sources/recipes/index.rst.txt b/_sources/recipes/index.rst.txt
new file mode 100644
index 000000000..9d1d83d29
--- /dev/null
+++ b/_sources/recipes/index.rst.txt
@@ -0,0 +1,19 @@
+Recipes
+=======
+
+This page contains various recipes in ``icefall``.
+Currently, only speech recognition recipes are provided.
+
+We may add recipes for other tasks as well in the future.
+
+.. we put the yesno recipe as the first recipe since it is the simplest one.
+.. Other recipes are listed in alphabetical order.
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Table of Contents
+
+   aishell/index
+   librispeech/index
+   timit/index
+   yesno/index
diff --git a/_sources/recipes/librispeech/conformer_ctc.rst.txt b/_sources/recipes/librispeech/conformer_ctc.rst.txt
new file mode 100644
index 000000000..4656acfd6
--- /dev/null
+++ b/_sources/recipes/librispeech/conformer_ctc.rst.txt
@@ -0,0 +1,1070 @@
+Conformer CTC
+=============
+
+This tutorial shows you how to run a conformer CTC model
+with the `LibriSpeech <http://www.openslr.org/12>`_ dataset.
+
+
+.. HINT::
+
+   We assume you have read the page :ref:`install icefall` and have setup
+   the environment for ``icefall``.
+
+.. HINT::
+
+   We recommend using a GPU or several GPUs to run this recipe.
+
+In this tutorial, you will learn:
+
+   - (1) How to prepare data for training and decoding
+   - (2) How to start the training, either with a single GPU or multiple GPUs
+   - (3) How to do decoding after training, with n-gram LM rescoring and attention decoder rescoring
+   - (4) How to use a pre-trained model provided by us
+   - (5) How to deploy your trained model in C++, without Python dependencies
+
+Data preparation
+----------------
+
+.. code-block:: bash
+
+   $ cd egs/librispeech/ASR
+   $ ./prepare.sh
+
+The script ``./prepare.sh`` handles the data preparation for you, **automagically**.
+All you need to do is to run it.
+
+The data preparation contains several stages; you can use the following two
+options:
+
+   - ``--stage``
+   - ``--stop-stage``
+
+to control which stage(s) should be run. By default, all stages are executed.
+
+
+For example,
+
+.. code-block:: bash
+
+   $ cd egs/librispeech/ASR
+   $ ./prepare.sh --stage 0 --stop-stage 0
+
+means to run only stage 0.
+
+To run stage 2 to stage 5, use:
+
+.. code-block:: bash
+
+   $ ./prepare.sh --stage 2 --stop-stage 5
+
+.. HINT::
+
+   If you have pre-downloaded the `LibriSpeech <http://www.openslr.org/12>`_
+   dataset and the `musan <http://www.openslr.org/17/>`_ dataset, say,
+   they are saved in ``/tmp/LibriSpeech`` and ``/tmp/musan``, you can modify
+   the ``dl_dir`` variable in ``./prepare.sh`` to point to ``/tmp`` so that
+   ``./prepare.sh`` won't re-download them.
+
+.. NOTE::
+
+   All generated files by ``./prepare.sh``, e.g., features, lexicon, etc,
+   are saved in ``./data`` directory.
+
+We provide the following YouTube video showing how to run ``./prepare.sh``.
+
+.. note::
+
+   To get the latest news of `next-gen Kaldi <https://github.com/k2-fsa>`_, please subscribe to
+   the following YouTube channel by `Nadira Povey <https://www.youtube.com/channel/UC_VaumpkmINz1pNkFXAN9mw>`_:
+
+   `<https://www.youtube.com/channel/UC_VaumpkmINz1pNkFXAN9mw>`_
+
+.. youtube:: ofEIoJL-mGM
+
+
+Training
+--------
+
+Configurable options
+~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+   $ cd egs/librispeech/ASR
+   $ ./conformer_ctc/train.py --help
+
+shows you the training options that can be passed from the commandline.
+The following options are used quite often:
+
+   - ``--full-libri``
+
+     If it's True, the training part uses all the training data, i.e.,
+     960 hours. Otherwise, the training part uses only the subset
+     ``train-clean-100``, which has 100 hours of training data.
+
+     .. CAUTION::
+
+        The training set is perturbed by speed with two factors: 0.9 and 1.1.
+        If ``--full-libri`` is True, each epoch actually processes
+        ``3x960 == 2880`` hours of data.
+
+   - ``--num-epochs``
+
+     It is the number of epochs to train. For instance,
+     ``./conformer_ctc/train.py --num-epochs 30`` trains for 30 epochs
+     and generates ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-29.pt``
+     in the folder ``./conformer_ctc/exp``.
+
+   - ``--start-epoch``
+
+     It's used to resume training.
+     ``./conformer_ctc/train.py --start-epoch 10`` loads the
+     checkpoint ``./conformer_ctc/exp/epoch-9.pt`` and starts
+     training from epoch 10, based on the state from epoch 9.
+
+   - ``--world-size``
+
+     It is used for multi-GPU single-machine DDP training.
+
+     - (a) If it is 1, then no DDP training is used.
+
+     - (b) If it is 2, then GPU 0 and GPU 1 are used for DDP training.
+
+     The following shows some use cases with it.
+
+     **Use case 1**: You have 4 GPUs, but you only want to use GPU 0 and
+     GPU 2 for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/librispeech/ASR
+           $ export CUDA_VISIBLE_DEVICES="0,2"
+           $ ./conformer_ctc/train.py --world-size 2
+
+     **Use case 2**: You have 4 GPUs and you want to use all of them
+     for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/librispeech/ASR
+           $ ./conformer_ctc/train.py --world-size 4
+
+     **Use case 3**: You have 4 GPUs but you only want to use GPU 3
+     for training. You can do the following:
+
+        .. code-block:: bash
+
+           $ cd egs/librispeech/ASR
+           $ export CUDA_VISIBLE_DEVICES="3"
+           $ ./conformer_ctc/train.py --world-size 1
+
+     .. CAUTION::
+
+        Only multi-GPU single-machine DDP training is implemented at present.
+        Multi-GPU multi-machine DDP training will be added later.
+
+   - ``--max-duration``
+
+     It specifies the number of seconds over all utterances in a
+     batch, before **padding**.
+     If you encounter CUDA OOM, please reduce it. For instance, if
+     you are using a V100 NVIDIA GPU, we recommend setting it to ``200``.
+
+     .. HINT::
+
+        Due to padding, the number of seconds of all utterances in a
+        batch will usually be larger than ``--max-duration``.
+
+        A larger value for ``--max-duration`` may cause OOM during training,
+        while a smaller value may increase the training time. You have to
+        tune it.
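+
+``--max-duration`` is enforced by the dataset sampler, which keeps adding
+utterances to a batch until their total (unpadded) duration would exceed the
+limit. A rough sketch with lhotse (the cuts path is hypothetical; the recipes
+use lhotse's bucketing samplers with more options than shown here):
+
+.. code-block:: python
+
+   from lhotse import CutSet
+   from lhotse.dataset import DynamicBucketingSampler
+
+   # Hypothetical path; the actual manifest name depends on the recipe.
+   cuts = CutSet.from_file("data/fbank/cuts_train.jsonl.gz")
+
+   sampler = DynamicBucketingSampler(
+       cuts,
+       max_duration=200.0,  # total seconds per batch, before padding
+       shuffle=True,
+   )
+
+   for batch_cuts in sampler:
+       ...  # each batch's total unpadded duration stays within ~200 s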
+ If you encounter CUDA OOM, please reduce it. For instance, if + you are using a V100 NVIDIA GPU, we recommend setting it to ``200``. + + .. HINT:: + + Due to padding, the number of seconds of all utterances in a + batch will usually be larger than ``--max-duration``. + + A larger value for ``--max-duration`` may cause OOM during training, + while a smaller value may increase the training time. You have to + tune it. + + +Pre-configured options +~~~~~~~~~~~~~~~~~~~~~~ + +There are some training options, e.g., weight decay, +number of warmup steps, results dir, etc., +that are not passed from the commandline. +They are pre-configured by the function ``get_params()`` in +`conformer_ctc/train.py `_. + +You don't need to change these pre-configured parameters. If you really need to change +them, please modify ``./conformer_ctc/train.py`` directly. + + +Training logs +~~~~~~~~~~~~~ + +Training logs and checkpoints are saved in ``conformer_ctc/exp``. +You will find the following files in that directory: + + - ``epoch-0.pt``, ``epoch-1.pt``, ... + + These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``. + To resume training from some checkpoint, say ``epoch-10.pt``, you can use: + + .. code-block:: bash + + $ ./conformer_ctc/train.py --start-epoch 11 + + - ``tensorboard/`` + + This folder contains TensorBoard logs. Training loss, validation loss, learning + rate, etc., are recorded in these logs. You can visualize them by: + + .. code-block:: bash + + $ cd conformer_ctc/exp/tensorboard + $ tensorboard dev upload --logdir . --description "Conformer CTC training for LibriSpeech with icefall" + + It will print something like the following: + + .. code-block:: + + TensorFlow installation not found - running with reduced feature set. + Upload started and will continue reading any new data as it's added to the logdir. + + To stop uploading, press Ctrl-C. + + New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/lzGnETjwRxC3yghNMd4kPw/ + + [2021-08-24T16:42:43] Started scanning logdir. + Uploading 4540 scalars... + + Note there is a URL in the above output. Click it and you will see + the following screenshot: + + .. figure:: images/librispeech-conformer-ctc-tensorboard-log.png + :width: 600 + :alt: TensorBoard screenshot + :align: center + :target: https://tensorboard.dev/experiment/lzGnETjwRxC3yghNMd4kPw/ + + TensorBoard screenshot. + + - ``log/log-train-xxxx`` + + It is the detailed training log in text format, same as the one + you saw printed to the console during training. + +Usage examples +~~~~~~~~~~~~~~ + +The following shows typical use cases: + +**Case 1** +^^^^^^^^^^ + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/train.py --max-duration 200 --full-libri 0 + +It uses a ``--max-duration`` of 200 to avoid OOM. Also, it uses only +a subset of the LibriSpeech data for training. + + +**Case 2** +^^^^^^^^^^ + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ export CUDA_VISIBLE_DEVICES="0,3" + $ ./conformer_ctc/train.py --world-size 2 + +It uses GPU 0 and GPU 3 for DDP training. + +**Case 3** +^^^^^^^^^^ + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/train.py --num-epochs 10 --start-epoch 3 + +It loads checkpoint ``./conformer_ctc/exp/epoch-2.pt`` and starts +training from epoch 3. Also, it trains for 10 epochs. + +Decoding +-------- + +The decoding part uses checkpoints saved by the training part, so you have +to run the training part first. + +..
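code-block:: bash + + # A minimal sketch, assuming training has finished and checkpoints such as + # epoch-25.pt, ..., epoch-29.pt exist in ./conformer_ctc/exp: + $ cd egs/librispeech/ASR + $ ./conformer_ctc/decode.py --epoch 29 --avg 5 + +The command above decodes with the average of the last 5 checkpoints. + +..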
code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/decode.py --help + +shows the options for decoding. + +The commonly used options are: + + - ``--method`` + + This specifies the decoding method. This script supports 7 decoding methods. + As for ctc decoding, it uses a sentence piece model to convert word pieces to words. + And it needs neither a lexicon nor an n-gram LM. + + For example, the following command uses CTC topology for decoding: + + .. code-block:: + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/decode.py --method ctc-decoding --max-duration 300 + # Caution: The above command is tested with a model with vocab size 500. + + And the following command uses attention decoder for rescoring: + + .. code-block:: + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/decode.py --method attention-decoder --max-duration 30 --nbest-scale 0.5 + + - ``--nbest-scale`` + + It is used to scale down lattice scores so that there are more unique + paths for rescoring. + + - ``--max-duration`` + + It has the same meaning as the one during training. A larger + value may cause OOM. + +Here are some results for CTC decoding with a vocab size of 500: + +Usage: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + # NOTE: Tested with a model with vocab size 500. + # It won't work for a model with vocab size 5000. + $ ./conformer_ctc/decode.py \ + --epoch 25 \ + --avg 1 \ + --max-duration 300 \ + --exp-dir conformer_ctc/exp \ + --lang-dir data/lang_bpe_500 \ + --method ctc-decoding + +The output is given below: + +.. code-block:: bash + + 2021-09-26 12:44:31,033 INFO [decode.py:537] Decoding started + 2021-09-26 12:44:31,033 INFO [decode.py:538] + {'lm_dir': PosixPath('data/lm'), 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, + 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 6, 'search_beam': 20, 'output_beam': 8, + 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, + 'epoch': 25, 'avg': 1, 'method': 'ctc-decoding', 'num_paths': 100, 'nbest_scale': 0.5, + 'export': False, 'exp_dir': PosixPath('conformer_ctc/exp'), 'lang_dir': PosixPath('data/lang_bpe_500'), 'full_libri': False, + 'feature_dir': PosixPath('data/fbank'), 'max_duration': 100, 'bucketing_sampler': False, 'num_buckets': 30, + 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, + 'shuffle': True, 'return_cuts': True, 'num_workers': 2} + 2021-09-26 12:44:31,406 INFO [lexicon.py:113] Loading pre-compiled data/lang_bpe_500/Linv.pt + 2021-09-26 12:44:31,464 INFO [decode.py:548] device: cuda:0 + 2021-09-26 12:44:36,171 INFO [checkpoint.py:92] Loading checkpoint from conformer_ctc/exp/epoch-25.pt + 2021-09-26 12:44:36,776 INFO [decode.py:652] Number of model parameters: 109226120 + 2021-09-26 12:44:37,714 INFO [decode.py:473] batch 0/206, cuts processed until now is 12 + 2021-09-26 12:45:15,944 INFO [decode.py:473] batch 100/206, cuts processed until now is 1328 + 2021-09-26 12:45:54,443 INFO [decode.py:473] batch 200/206, cuts processed until now is 2563 + 2021-09-26 12:45:56,411 INFO [decode.py:494] The transcripts are stored in conformer_ctc/exp/recogs-test-clean-ctc-decoding.txt + 2021-09-26 12:45:56,592 INFO [utils.py:331] [test-clean-ctc-decoding] %WER 3.26% [1715 / 52576, 163 ins, 128 del, 1424 sub ] + 2021-09-26 12:45:56,807 INFO [decode.py:506] Wrote detailed error stats to conformer_ctc/exp/errs-test-clean-ctc-decoding.txt + 2021-09-26 12:45:56,808 INFO [decode.py:522] + For test-clean, WER of different 
settings are: + ctc-decoding 3.26 best for test-clean + + 2021-09-26 12:45:57,362 INFO [decode.py:473] batch 0/203, cuts processed until now is 15 + 2021-09-26 12:46:35,565 INFO [decode.py:473] batch 100/203, cuts processed until now is 1477 + 2021-09-26 12:47:15,106 INFO [decode.py:473] batch 200/203, cuts processed until now is 2922 + 2021-09-26 12:47:16,131 INFO [decode.py:494] The transcripts are stored in conformer_ctc/exp/recogs-test-other-ctc-decoding.txt + 2021-09-26 12:47:16,208 INFO [utils.py:331] [test-other-ctc-decoding] %WER 8.21% [4295 / 52343, 396 ins, 315 del, 3584 sub ] + 2021-09-26 12:47:16,432 INFO [decode.py:506] Wrote detailed error stats to conformer_ctc/exp/errs-test-other-ctc-decoding.txt + 2021-09-26 12:47:16,432 INFO [decode.py:522] + For test-other, WER of different settings are: + ctc-decoding 8.21 best for test-other + + 2021-09-26 12:47:16,433 INFO [decode.py:680] Done! + +Pre-trained Model +----------------- + +We have uploaded a pre-trained model to +``_ + +We describe how to use the pre-trained model to transcribe a sound file or +multiple sound files in the following. + +Install kaldifeat +~~~~~~~~~~~~~~~~~ + +`kaldifeat `_ is used to +extract features for a single sound file or multiple sound files +at the same time. + +Please refer to ``_ for installation. + +Download the pre-trained model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following commands describe how to download the pre-trained model: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 + $ cd icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 + $ git lfs pull + +.. CAUTION:: + + You have to use ``git lfs pull`` to download the pre-trained model. + Otherwise, you will have the following issue when running ``decode.py``: + + .. code-block:: + + _pickle.UnpicklingError: invalid load key, 'v' + + To fix that issue, please use: + + .. code-block:: bash + + cd icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 + git lfs pull + +.. CAUTION:: + + In order to use this pre-trained model, your k2 version has to be v1.9 or later. + +After downloading, you will have the following files: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ tree icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 + +.. code-block:: bash + + icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 + |-- README.md + |-- data + | |-- lang_bpe_500 + | | |-- HLG.pt + | | |-- HLG_modified.pt + | | |-- bpe.model + | | |-- tokens.txt + | | `-- words.txt + | `-- lm + | `-- G_4_gram.pt + |-- exp + | |-- cpu_jit.pt + | `-- pretrained.pt + |-- log + | `-- log-decode-2021-11-09-17-38-28 + `-- test_wavs + |-- 1089-134686-0001.wav + |-- 1221-135766-0001.wav + |-- 1221-135766-0002.wav + `-- trans.txt + + +**File descriptions**: + - ``data/lang_bpe_500/HLG.pt`` + + It is the decoding graph. + + - ``data/lang_bpe_500/HLG_modified.pt`` + + It uses a modified CTC topology while building HLG. + + - ``data/lang_bpe_500/bpe.model`` + + It is a sentencepiece model. You can use it to reproduce our results. + + - ``data/lang_bpe_500/tokens.txt`` + + It contains tokens and their IDs, generated from ``bpe.model``. + Provided only for convenience so that you can look up the SOS/EOS ID easily. + + - ``data/lang_bpe_500/words.txt`` + + It contains words and their IDs. + + - ``data/lm/G_4_gram.pt`` + + It is a 4-gram LM, used for n-gram LM rescoring. 
+ + - ``exp/pretrained.pt`` + + It contains pre-trained model parameters, obtained by averaging + checkpoints from ``epoch-23.pt`` to ``epoch-77.pt``. + Note: We have removed optimizer ``state_dict`` to reduce file size. + + - ``exp/cpu_jit.pt`` + + It contains torch scripted model that can be deployed in C++. + + - ``test_wavs/*.wav`` + + It contains some test sound files from LibriSpeech ``test-clean`` dataset. + + - ``test_wavs/trans.txt`` + + It contains the reference transcripts for the sound files in ``test_wavs/``. + +The information of the test sound files is listed below: + +.. code-block:: bash + + $ soxi icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/*.wav + + Input File : 'icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:06.62 = 106000 samples ~ 496.875 CDDA sectors + File Size : 212k + Bit Rate : 256k + Sample Encoding: 16-bit Signed Integer PCM + + + Input File : 'icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:16.71 = 267440 samples ~ 1253.62 CDDA sectors + File Size : 535k + Bit Rate : 256k + Sample Encoding: 16-bit Signed Integer PCM + + + Input File : 'icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:04.83 = 77200 samples ~ 361.875 CDDA sectors + File Size : 154k + Bit Rate : 256k + Sample Encoding: 16-bit Signed Integer PCM + + Total Duration of 3 files: 00:00:28.16 + +Usage +~~~~~ + +.. code-block:: + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/pretrained.py --help + +displays the help information. + +It supports 4 decoding methods: + + - CTC decoding + - HLG decoding + - HLG + n-gram LM rescoring + - HLG + n-gram LM rescoring + attention decoder rescoring + +CTC decoding +^^^^^^^^^^^^ + +CTC decoding uses the best path of the decoding lattice as the decoding result +without any LM or lexicon. + +The command to run CTC decoding is: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/pretrained.py \ + --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \ + --bpe-model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/bpe.model \ + --method ctc-decoding \ + --num-classes 500 \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +The output is given below: + +.. 
code-block:: + + 2021-11-10 12:12:29,554 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 0, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': None, 'HLG': None, 'bpe_model': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/bpe.model', 'method': 'ctc-decoding', 'G': None, 'num_paths': 100, 'ngram_lm_scale': 1.3, 'attention_decoder_scale': 1.2, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}} + 2021-11-10 12:12:29,554 INFO [pretrained.py:266] device: cuda:0 + 2021-11-10 12:12:29,554 INFO [pretrained.py:268] Creating model + 2021-11-10 12:12:35,600 INFO [pretrained.py:285] Constructing Fbank computer + 2021-11-10 12:12:35,601 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'] + 2021-11-10 12:12:35,758 INFO [pretrained.py:301] Decoding started + 2021-11-10 12:12:36,025 INFO [pretrained.py:319] Use CTC decoding + 2021-11-10 12:12:36,204 INFO [pretrained.py:425] + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav: + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROFFELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav: + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED B + OSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav: + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + 2021-11-10 12:12:36,204 INFO [pretrained.py:427] Decoding Done + +HLG decoding +^^^^^^^^^^^^ + +HLG decoding uses the best path of the decoding lattice as the decoding result. + +The command to run HLG decoding is: + +.. 
code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/pretrained.py \ + --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \ + --words-file ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \ + --method 1best \ + --num-classes 500 \ + --HLG ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +The output is given below: + +.. code-block:: + + 2021-11-10 13:33:03,723 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 0, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt', 'HLG': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt', 'bpe_model': None, 'method': '1best', 'G': None, 'num_paths': 100, 'ngram_lm_scale': 1.3, 'attention_decoder_scale': 1.2, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}} + 2021-11-10 13:33:03,723 INFO [pretrained.py:266] device: cuda:0 + 2021-11-10 13:33:03,723 INFO [pretrained.py:268] Creating model + 2021-11-10 13:33:09,775 INFO [pretrained.py:285] Constructing Fbank computer + 2021-11-10 13:33:09,776 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'] + 2021-11-10 13:33:09,881 INFO [pretrained.py:301] Decoding started + 2021-11-10 13:33:09,951 INFO [pretrained.py:352] Loading HLG from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt + 2021-11-10 13:33:13,234 INFO [pretrained.py:384] Use HLG decoding + 2021-11-10 13:33:13,571 INFO [pretrained.py:425] + 
./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav: + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav: + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav: + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + 2021-11-10 13:33:13,571 INFO [pretrained.py:427] Decoding Done + + +HLG decoding + LM rescoring +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +It uses an n-gram LM to rescore the decoding lattice, and the best +path of the rescored lattice is the decoding result. + +The command to run HLG decoding + LM rescoring is: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/pretrained.py \ + --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \ + --words-file ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \ + --method whole-lattice-rescoring \ + --num-classes 500 \ + --HLG ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \ + --G ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \ + --ngram-lm-scale 1.0 \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +Its output is: + +..
code-block:: + + 2021-11-10 13:39:55,857 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 0, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt', 'HLG': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt', 'bpe_model': None, 'method': 'whole-lattice-rescoring', 'G': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt', 'num_paths': 100, 'ngram_lm_scale': 1.0, 'attention_decoder_scale': 1.2, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-$it-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}} + 2021-11-10 13:39:55,858 INFO [pretrained.py:266] device: cuda:0 + 2021-11-10 13:39:55,858 INFO [pretrained.py:268] Creating model + 2021-11-10 13:40:01,979 INFO [pretrained.py:285] Constructing Fbank computer + 2021-11-10 13:40:01,980 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'] + 2021-11-10 13:40:02,055 INFO [pretrained.py:301] Decoding started + 2021-11-10 13:40:02,117 INFO [pretrained.py:352] Loading HLG from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt + 2021-11-10 13:40:05,051 INFO [pretrained.py:363] Loading G from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt + 2021-11-10 13:40:18,959 INFO [pretrained.py:389] Use HLG decoding + LM rescoring + 2021-11-10 13:40:19,546 INFO [pretrained.py:425] + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav: + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav: + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN 
HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav: + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + 2021-11-10 13:40:19,546 INFO [pretrained.py:427] Decoding Done + + +HLG decoding + LM rescoring + attention decoder rescoring +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +It uses an n-gram LM to rescore the decoding lattice, extracts +n paths from the rescored lattice, and rescores the extracted paths with +an attention decoder. The path with the highest score is the decoding result. + +The command to run HLG decoding + LM rescoring + attention decoder rescoring is: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./conformer_ctc/pretrained.py \ + --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \ + --words-file ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \ + --method attention-decoder \ + --num-classes 500 \ + --HLG ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \ + --G ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \ + --ngram-lm-scale 2.0 \ + --attention-decoder-scale 2.0 \ + --nbest-scale 0.5 \ + --num-paths 100 \ + --sos-id 1 \ + --eos-id 1 \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +The output is given below: + +.. code-block:: + + 2021-11-10 13:43:45,598 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 6, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt', 'HLG': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt', 'bpe_model': None, 'method': 'attention-decoder', 'G': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt', 'num_paths': 100, 'ngram_lm_scale': 2.0, 'attention_decoder_scale': 2.0, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path':
'/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}} + 2021-11-10 13:43:45,599 INFO [pretrained.py:266] device: cuda:0 + 2021-11-10 13:43:45,599 INFO [pretrained.py:268] Creating model + 2021-11-10 13:43:51,833 INFO [pretrained.py:285] Constructing Fbank computer + 2021-11-10 13:43:51,834 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'] + 2021-11-10 13:43:51,915 INFO [pretrained.py:301] Decoding started + 2021-11-10 13:43:52,076 INFO [pretrained.py:352] Loading HLG from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt + 2021-11-10 13:43:55,110 INFO [pretrained.py:363] Loading G from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt + 2021-11-10 13:44:09,329 INFO [pretrained.py:397] Use HLG + LM rescoring + attention decoder rescoring + 2021-11-10 13:44:10,192 INFO [pretrained.py:425] + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav: + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav: + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav: + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + 2021-11-10 13:44:10,192 INFO [pretrained.py:427] Decoding Done + + +Compute WER with the pre-trained model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To check the WER of the pre-trained model on the test datasets, run: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ cd icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/ + $ ln -s pretrained.pt epoch-999.pt + $ cd ../.. + $ ./conformer_ctc/decode.py \ + --exp-dir ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp \ + --lang-dir ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500 \ + --lm-dir ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm \ + --epoch 999 \ + --avg 1 \ + --concatenate-cuts 0 \ + --bucketing-sampler 1 \ + --max-duration 30 \ + --num-paths 1000 \ + --method attention-decoder \ + --nbest-scale 0.5 + + +Colab notebook +-------------- + +We do provide a colab notebook for this recipe showing how to use a pre-trained model. + +|librispeech asr conformer ctc colab notebook| + +.. |librispeech asr conformer ctc colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg + :target: https://colab.research.google.com/drive/1huyupXAcHsUrKaWfI83iMEJ6J0Nh0213?usp=sharing + +.. HINT:: + + Due to limited memory provided by Colab, you have to upgrade to Colab Pro to + run ``HLG decoding + LM rescoring`` and + ``HLG decoding + LM rescoring + attention decoder rescoring``. + Otherwise, you can only run ``HLG decoding`` with Colab. + +**Congratulations!** You have finished the LibriSpeech ASR recipe with +conformer CTC models in ``icefall``. 
+ +If you want to deploy your trained model in C++, please read the following section. + +Deployment with C++ +------------------- + +This section describes how to deploy the pre-trained model in C++, without +Python dependencies. + +.. HINT:: + + At present, it does NOT support streaming decoding. + +First, let us compile k2 from source: + +.. code-block:: bash + + $ cd $HOME + $ git clone https://github.com/k2-fsa/k2 + $ cd k2 + $ git checkout v2.0-pre + +.. CAUTION:: + + You have to switch to the branch ``v2.0-pre``! + +.. code-block:: bash + + $ mkdir build-release + $ cd build-release + $ cmake -DCMAKE_BUILD_TYPE=Release .. + $ make -j ctc_decode hlg_decode ngram_lm_rescore attention_rescore + + # You will find four binaries in `./bin`, i.e., + # ./bin/ctc_decode, ./bin/hlg_decode, + # ./bin/ngram_lm_rescore, and ./bin/attention_rescore + +Now you are ready to go! + +Assume you have run: + + .. code-block:: bash + + $ cd k2/build-release + $ ln -s /path/to/icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 ./ + +To view the usage of ``./bin/ctc_decode``, run: + +.. code-block:: + + $ ./bin/ctc_decode + +It will show you the following message: + +.. code-block:: bash + + Please provide --nn_model + + This file implements decoding with a CTC topology, without any + kinds of LM or lexicons. + + Usage: + ./bin/ctc_decode \ + --use_gpu true \ + --nn_model \ + --bpe_model \ + \ + \ + + + To see all possible options, use + ./bin/ctc_decode --help + + Caution: + - Only sound files (*.wav) with single channel are supported. + - It assumes the model is conformer_ctc/transformer.py from icefall. + If you use a different model, you have to change the code + related to `model.forward` in this file. + + +CTC decoding +^^^^^^^^^^^^ + +.. code-block:: bash + + ./bin/ctc_decode \ + --use_gpu true \ + --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \ + --bpe_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/bpe.model \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +Its output is: + +.. 
code-block:: + + 2021-11-10 13:57:55.316 [I] k2/torch/bin/ctc_decode.cu:105:int main(int, char**) Use GPU + 2021-11-10 13:57:55.316 [I] k2/torch/bin/ctc_decode.cu:109:int main(int, char**) Device: cuda:0 + 2021-11-10 13:57:55.316 [I] k2/torch/bin/ctc_decode.cu:118:int main(int, char**) Load wave files + 2021-11-10 13:58:01.221 [I] k2/torch/bin/ctc_decode.cu:125:int main(int, char**) Build Fbank computer + 2021-11-10 13:58:01.222 [I] k2/torch/bin/ctc_decode.cu:136:int main(int, char**) Compute features + 2021-11-10 13:58:01.228 [I] k2/torch/bin/ctc_decode.cu:144:int main(int, char**) Load neural network model + 2021-11-10 13:58:02.19 [I] k2/torch/bin/ctc_decode.cu:159:int main(int, char**) Compute nnet_output + 2021-11-10 13:58:02.543 [I] k2/torch/bin/ctc_decode.cu:174:int main(int, char**) Build CTC topo + 2021-11-10 13:58:02.547 [I] k2/torch/bin/ctc_decode.cu:177:int main(int, char**) Decoding + 2021-11-10 13:58:02.708 [I] k2/torch/bin/ctc_decode.cu:207:int main(int, char**) + Decoding result: + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROFFELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + +HLG decoding +^^^^^^^^^^^^ + +.. code-block:: bash + + ./bin/hlg_decode \ + --use_gpu true \ + --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \ + --hlg ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \ + --word_table ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +The output is: + +.. 
code-block:: + + 2021-11-10 13:59:04.729 [I] k2/torch/bin/hlg_decode.cu:111:int main(int, char**) Use GPU + 2021-11-10 13:59:04.729 [I] k2/torch/bin/hlg_decode.cu:115:int main(int, char**) Device: cuda:0 + 2021-11-10 13:59:04.729 [I] k2/torch/bin/hlg_decode.cu:124:int main(int, char**) Load wave files + 2021-11-10 13:59:10.702 [I] k2/torch/bin/hlg_decode.cu:131:int main(int, char**) Build Fbank computer + 2021-11-10 13:59:10.703 [I] k2/torch/bin/hlg_decode.cu:142:int main(int, char**) Compute features + 2021-11-10 13:59:10.707 [I] k2/torch/bin/hlg_decode.cu:150:int main(int, char**) Load neural network model + 2021-11-10 13:59:11.545 [I] k2/torch/bin/hlg_decode.cu:165:int main(int, char**) Compute nnet_output + 2021-11-10 13:59:12.72 [I] k2/torch/bin/hlg_decode.cu:180:int main(int, char**) Load ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt + 2021-11-10 13:59:12.994 [I] k2/torch/bin/hlg_decode.cu:185:int main(int, char**) Decoding + 2021-11-10 13:59:13.268 [I] k2/torch/bin/hlg_decode.cu:216:int main(int, char**) + Decoding result: + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + +HLG decoding + n-gram LM rescoring +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: bash + + ./bin/ngram_lm_rescore \ + --use_gpu true \ + --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \ + --hlg ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \ + --g ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \ + --ngram_lm_scale 1.0 \ + --word_table ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +The output is: + +.. 
code-block:: + + 2021-11-10 14:00:55.279 [I] k2/torch/bin/ngram_lm_rescore.cu:122:int main(int, char**) Use GPU + 2021-11-10 14:00:55.280 [I] k2/torch/bin/ngram_lm_rescore.cu:126:int main(int, char**) Device: cuda:0 + 2021-11-10 14:00:55.280 [I] k2/torch/bin/ngram_lm_rescore.cu:135:int main(int, char**) Load wave files + 2021-11-10 14:01:01.214 [I] k2/torch/bin/ngram_lm_rescore.cu:142:int main(int, char**) Build Fbank computer + 2021-11-10 14:01:01.215 [I] k2/torch/bin/ngram_lm_rescore.cu:153:int main(int, char**) Compute features + 2021-11-10 14:01:01.219 [I] k2/torch/bin/ngram_lm_rescore.cu:161:int main(int, char**) Load neural network model + 2021-11-10 14:01:01.945 [I] k2/torch/bin/ngram_lm_rescore.cu:176:int main(int, char**) Compute nnet_output + 2021-11-10 14:01:02.475 [I] k2/torch/bin/ngram_lm_rescore.cu:191:int main(int, char**) Load ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt + 2021-11-10 14:01:03.398 [I] k2/torch/bin/ngram_lm_rescore.cu:199:int main(int, char**) Decoding + 2021-11-10 14:01:03.515 [I] k2/torch/bin/ngram_lm_rescore.cu:205:int main(int, char**) Load n-gram LM: ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt + 2021-11-10 14:01:07.432 [W] k2/torch/csrc/deserialization.cu:441:k2::FsaClass k2::LoadFsa(const string&, c10::optional) + Ignore non tensor attribute: 'dummy' of type: Int + 2021-11-10 14:01:07.589 [I] k2/torch/bin/ngram_lm_rescore.cu:214:int main(int, char**) Rescore with an n-gram LM + 2021-11-10 14:01:08.68 [I] k2/torch/bin/ngram_lm_rescore.cu:242:int main(int, char**) + Decoding result: + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + +HLG decoding + n-gram LM rescoring + attention decoder rescoring +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: bash + + ./bin/attention_rescore \ + --use_gpu true \ + --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \ + --hlg ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \ + --g ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \ + --ngram_lm_scale 2.0 \ + --attention_scale 2.0 \ + --num_paths 100 \ + --nbest_scale 0.5 \ + --word_table ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \ + --sos_id 1 \ + --eos_id 1 \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \ + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + +The output is: + +.. 
code-block:: + + 2021-11-10 14:02:43.656 [I] k2/torch/bin/attention_rescore.cu:149:int main(int, char**) Use GPU + 2021-11-10 14:02:43.656 [I] k2/torch/bin/attention_rescore.cu:153:int main(int, char**) Device: cuda:0 + 2021-11-10 14:02:43.656 [I] k2/torch/bin/attention_rescore.cu:162:int main(int, char**) Load wave files + 2021-11-10 14:02:49.216 [I] k2/torch/bin/attention_rescore.cu:169:int main(int, char**) Build Fbank computer + 2021-11-10 14:02:49.217 [I] k2/torch/bin/attention_rescore.cu:180:int main(int, char**) Compute features + 2021-11-10 14:02:49.222 [I] k2/torch/bin/attention_rescore.cu:188:int main(int, char**) Load neural network model + 2021-11-10 14:02:49.984 [I] k2/torch/bin/attention_rescore.cu:203:int main(int, char**) Compute nnet_output + 2021-11-10 14:02:50.624 [I] k2/torch/bin/attention_rescore.cu:220:int main(int, char**) Load ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt + 2021-11-10 14:02:51.519 [I] k2/torch/bin/attention_rescore.cu:228:int main(int, char**) Decoding + 2021-11-10 14:02:51.632 [I] k2/torch/bin/attention_rescore.cu:234:int main(int, char**) Load n-gram LM: ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt + 2021-11-10 14:02:55.537 [W] k2/torch/csrc/deserialization.cu:441:k2::FsaClass k2::LoadFsa(const string&, c10::optional) Ignore non tensor attribute: 'dummy' of type: Int + 2021-11-10 14:02:55.645 [I] k2/torch/bin/attention_rescore.cu:243:int main(int, char**) Rescore with an n-gram LM + 2021-11-10 14:02:55.970 [I] k2/torch/bin/attention_rescore.cu:246:int main(int, char**) Sample 100 paths + 2021-11-10 14:02:56.215 [I] k2/torch/bin/attention_rescore.cu:293:int main(int, char**) Run attention decoder + 2021-11-10 14:02:57.35 [I] k2/torch/bin/attention_rescore.cu:303:int main(int, char**) Rescoring + 2021-11-10 14:02:57.179 [I] k2/torch/bin/attention_rescore.cu:369:int main(int, char**) + Decoding result: + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + +There is a Colab notebook showing you how to run a torch scripted model in C++. +Please see |librispeech asr conformer ctc torch script colab notebook| + +.. |librispeech asr conformer ctc torch script colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg + :target: https://colab.research.google.com/drive/1BIGLWzS36isskMXHKcqC9ysN6pspYXs_?usp=sharing diff --git a/_sources/recipes/librispeech/index.rst.txt b/_sources/recipes/librispeech/index.rst.txt new file mode 100644 index 000000000..5fa08ab6b --- /dev/null +++ b/_sources/recipes/librispeech/index.rst.txt @@ -0,0 +1,8 @@ +LibriSpeech +=========== + +.. 
toctree:: + :maxdepth: 1 + + tdnn_lstm_ctc + conformer_ctc diff --git a/_sources/recipes/librispeech/tdnn_lstm_ctc.rst.txt b/_sources/recipes/librispeech/tdnn_lstm_ctc.rst.txt new file mode 100644 index 000000000..ca477fbaa --- /dev/null +++ b/_sources/recipes/librispeech/tdnn_lstm_ctc.rst.txt @@ -0,0 +1,404 @@ +TDNN-LSTM-CTC +============= + +This tutorial shows you how to run a TDNN-LSTM-CTC model with the `LibriSpeech `_ dataset. + + +.. HINT:: + + We assume you have read the page :ref:`install icefall` and have set up + the environment for ``icefall``. + + +Data preparation +---------------- + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./prepare.sh + +The script ``./prepare.sh`` handles the data preparation for you, **automagically**. +All you need to do is to run it. + +The data preparation consists of several stages. You can use the following two +options: + + - ``--stage`` + - ``--stop-stage`` + +to control which stage(s) should be run. By default, all stages are executed. + + +For example, + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./prepare.sh --stage 0 --stop-stage 0 + +means to run only stage 0. + +To run stage 2 to stage 5, use: + +.. code-block:: bash + + $ ./prepare.sh --stage 2 --stop-stage 5 + +We provide the following YouTube video showing how to run ``./prepare.sh``. + +.. note:: + + To get the latest news of `next-gen Kaldi `_, please subscribe to + the following YouTube channel by `Nadira Povey `_: + + ``_ + +.. youtube:: ofEIoJL-mGM + +Training +-------- + +We now describe the training of the TDNN-LSTM-CTC model, contained in +the `tdnn_lstm_ctc `_ +folder. + +The command to run the training part is: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ export CUDA_VISIBLE_DEVICES="0,1,2,3" + $ ./tdnn_lstm_ctc/train.py --world-size 4 + +By default, it will run ``20`` epochs. Training logs and checkpoints are saved +in ``tdnn_lstm_ctc/exp``. + +In ``tdnn_lstm_ctc/exp``, you will find the following files: + + - ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-19.pt`` + + These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``. + To resume training from some checkpoint, say ``epoch-10.pt``, you can use: + + .. code-block:: bash + + $ ./tdnn_lstm_ctc/train.py --start-epoch 11 + + - ``tensorboard/`` + + This folder contains TensorBoard logs. Training loss, validation loss, learning + rate, etc., are recorded in these logs. You can visualize them by: + + .. code-block:: bash + + $ cd tdnn_lstm_ctc/exp/tensorboard + $ tensorboard dev upload --logdir . --description "TDNN LSTM training for librispeech with icefall" + + - ``log/log-train-xxxx`` + + It is the detailed training log in text format, same as the one + you saw printed to the console during training. + + +To see available training options, you can use: + +.. code-block:: bash + + $ ./tdnn_lstm_ctc/train.py --help + +Other training options, e.g., learning rate, results dir, etc., are +pre-configured in the function ``get_params()`` +in `tdnn_lstm_ctc/train.py `_. +Normally, you don't need to change them. If you really need to change them, +you can do so by modifying the code. + +Decoding +-------- + +The decoding part uses checkpoints saved by the training part, so you have +to run the training part first. + +The command for decoding is: + +.. code-block:: bash + + $ export CUDA_VISIBLE_DEVICES="0" + $ ./tdnn_lstm_ctc/decode.py + +You will see the WER in the output log. + +Decoded results are saved in ``tdnn_lstm_ctc/exp``. + +..
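code-block:: bash + + # A sketch for locating the decoded results mentioned above; the exact + # file names depend on the test sets and the decoding method that was used: + $ ls tdnn_lstm_ctc/exp + +..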
code-block:: bash + + $ ./tdnn_lstm_ctc/decode.py --help + +shows you the available decoding options. + +Some commonly used options are: + + - ``--epoch`` + + You can select which checkpoint to use for decoding. + For instance, ``./tdnn_lstm_ctc/decode.py --epoch 10`` means to use + ``./tdnn_lstm_ctc/exp/epoch-10.pt`` for decoding. + + - ``--avg`` + + It's related to model averaging. It specifies the number of checkpoints + to be averaged. The averaged model is used for decoding. + For example, the following command: + + .. code-block:: bash + + $ ./tdnn_lstm_ctc/decode.py --epoch 10 --avg 3 + + uses the average of ``epoch-8.pt``, ``epoch-9.pt`` and ``epoch-10.pt`` + for decoding. + + - ``--export`` + + If it is ``True``, i.e., ``./tdnn_lstm_ctc/decode.py --export 1``, the code + will save the averaged model to ``tdnn_lstm_ctc/exp/pretrained.pt``. + See :ref:`tdnn_lstm_ctc use a pre-trained model` for how to use it. + + +.. _tdnn_lstm_ctc use a pre-trained model: + +Pre-trained Model +----------------- + +We have uploaded the pre-trained model to +``_. + +The following shows you how to use the pre-trained model. + + +Install kaldifeat +~~~~~~~~~~~~~~~~~ + +`kaldifeat `_ is used to +extract features for a single sound file or multiple sound files +at the same time. + +Please refer to ``_ for installation. + +Download the pre-trained model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ mkdir tmp + $ cd tmp + $ git lfs install + $ git clone https://huggingface.co/pkufool/icefall_asr_librispeech_tdnn-lstm_ctc + +.. CAUTION:: + + You have to use ``git lfs`` to download the pre-trained model. + +.. CAUTION:: + + In order to use this pre-trained model, your k2 version has to be v1.7 or later. + +After downloading, you will have the following files: + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ tree tmp + +.. code-block:: bash + + tmp/ + `-- icefall_asr_librispeech_tdnn-lstm_ctc + |-- README.md + |-- data + | |-- lang_phone + | | |-- HLG.pt + | | |-- tokens.txt + | | `-- words.txt + | `-- lm + | `-- G_4_gram.pt + |-- exp + | `-- pretrained.pt + `-- test_wavs + |-- 1089-134686-0001.flac + |-- 1221-135766-0001.flac + |-- 1221-135766-0002.flac + `-- trans.txt + + 6 directories, 10 files + +**File descriptions**: + + - ``data/lang_phone/HLG.pt`` + + It is the decoding graph. + + - ``data/lang_phone/tokens.txt`` + + It contains tokens and their IDs. + + - ``data/lang_phone/words.txt`` + + It contains words and their IDs. + + - ``data/lm/G_4_gram.pt`` + + It is a 4-gram LM, useful for LM rescoring. + + - ``exp/pretrained.pt`` + + It contains pre-trained model parameters, obtained by averaging + checkpoints from ``epoch-14.pt`` to ``epoch-19.pt``. + Note: We have removed optimizer ``state_dict`` to reduce file size. + + - ``test_wavs/*.flac`` + + It contains some test sound files from the LibriSpeech ``test-clean`` dataset. + + - ``test_wavs/trans.txt`` + + It contains the reference transcripts for the sound files in ``test_wavs/``.
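+ +For instance, you can quickly verify that the downloaded checkpoint loads correctly (a sketch; the exact keys stored in the checkpoint depend on how it was exported): + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ python3 -c "import torch; ckpt = torch.load('tmp/icefall_asr_librispeech_tdnn-lstm_ctc/exp/pretrained.pt', map_location='cpu'); print(list(ckpt.keys()))" + +The information of the test sound files is listed below: + +..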
code-block:: bash + + $ soxi tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/*.flac + + Input File : 'tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:06.62 = 106000 samples ~ 496.875 CDDA sectors + File Size : 116k + Bit Rate : 140k + Sample Encoding: 16-bit FLAC + + + Input File : 'tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:16.71 = 267440 samples ~ 1253.62 CDDA sectors + File Size : 343k + Bit Rate : 164k + Sample Encoding: 16-bit FLAC + + + Input File : 'tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac' + Channels : 1 + Sample Rate : 16000 + Precision : 16-bit + Duration : 00:00:04.83 = 77200 samples ~ 361.875 CDDA sectors + File Size : 105k + Bit Rate : 174k + Sample Encoding: 16-bit FLAC + + Total Duration of 3 files: 00:00:28.16 + + +Inference with a pre-trained model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: bash + + $ cd egs/librispeech/ASR + $ ./tdnn_lstm_ctc/pretrained.py --help + +shows the usage information of ``./tdnn_lstm_ctc/pretrained.py``. + +To decode with the ``1best`` method, we can use: + +.. code-block:: bash + + ./tdnn_lstm_ctc/pretrained.py \ + --checkpoint ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/exp/pretrained.pt \ + --words-file ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/words.txt \ + --HLG ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt \ + ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac \ + ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac \ + ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac + +The output is: + +.. code-block:: + + 2021-08-24 16:57:13,315 INFO [pretrained.py:168] device: cuda:0 + 2021-08-24 16:57:13,315 INFO [pretrained.py:170] Creating model + 2021-08-24 16:57:18,331 INFO [pretrained.py:182] Loading HLG from ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt + 2021-08-24 16:57:27,581 INFO [pretrained.py:199] Constructing Fbank computer + 2021-08-24 16:57:27,584 INFO [pretrained.py:209] Reading sound files: ['./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac'] + 2021-08-24 16:57:27,599 INFO [pretrained.py:215] Decoding started + 2021-08-24 16:57:27,791 INFO [pretrained.py:245] Use HLG decoding + 2021-08-24 16:57:28,098 INFO [pretrained.py:266] + ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac: + AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS + + ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac: + GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN + + ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac: + YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION + + + 2021-08-24 16:57:28,099 INFO [pretrained.py:268] Decoding Done + + +To decode with the ``whole-lattice-rescoring`` method, you can use + +..
+To decode with ``whole-lattice-rescoring`` method, you can use:
+
+.. code-block:: bash
+
+    ./tdnn_lstm_ctc/pretrained.py \
+      --checkpoint ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/exp/pretrained.pt \
+      --words-file ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/words.txt \
+      --HLG ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt \
+      --method whole-lattice-rescoring \
+      --G ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lm/G_4_gram.pt \
+      --ngram-lm-scale 0.8 \
+      ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac \
+      ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac \
+      ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac
+
+The decoding output is:
+
+.. code-block::
+
+    2021-08-24 16:39:24,725 INFO [pretrained.py:168] device: cuda:0
+    2021-08-24 16:39:24,725 INFO [pretrained.py:170] Creating model
+    2021-08-24 16:39:29,403 INFO [pretrained.py:182] Loading HLG from ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt
+    2021-08-24 16:39:40,631 INFO [pretrained.py:190] Loading G from ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lm/G_4_gram.pt
+    2021-08-24 16:39:53,098 INFO [pretrained.py:199] Constructing Fbank computer
+    2021-08-24 16:39:53,107 INFO [pretrained.py:209] Reading sound files: ['./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac']
+    2021-08-24 16:39:53,121 INFO [pretrained.py:215] Decoding started
+    2021-08-24 16:39:53,443 INFO [pretrained.py:250] Use HLG decoding + LM rescoring
+    2021-08-24 16:39:54,010 INFO [pretrained.py:266]
+    ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac:
+    AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS
+
+    ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac:
+    GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN
+
+    ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac:
+    YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION
+
+
+    2021-08-24 16:39:54,010 INFO [pretrained.py:268] Decoding Done
+
+
+Colab notebook
+--------------
+
+We provide a colab notebook for decoding with a pre-trained model.
+
+|librispeech tdnn_lstm_ctc colab notebook|
+
+.. |librispeech tdnn_lstm_ctc colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/1kNmDXNMwREi0rZGAOIAOJo93REBuOTcd
+
+
+**Congratulations!** You have finished the TDNN-LSTM-CTC recipe on LibriSpeech in ``icefall``.
diff --git a/_sources/recipes/timit/index.rst.txt b/_sources/recipes/timit/index.rst.txt
new file mode 100644
index 000000000..17f40cdb7
--- /dev/null
+++ b/_sources/recipes/timit/index.rst.txt
@@ -0,0 +1,9 @@
+TIMIT
+=====
+
+.. toctree::
+   :maxdepth: 1
+
+   tdnn_ligru_ctc
+   tdnn_lstm_ctc
+
diff --git a/_sources/recipes/timit/tdnn_ligru_ctc.rst.txt b/_sources/recipes/timit/tdnn_ligru_ctc.rst.txt
new file mode 100644
index 000000000..186420ee7
--- /dev/null
+++ b/_sources/recipes/timit/tdnn_ligru_ctc.rst.txt
@@ -0,0 +1,406 @@
+TDNN-LiGRU-CTC
+==============
+
+This tutorial shows you how to run a TDNN-LiGRU-CTC model with the TIMIT dataset.
+
+
+.. HINT::
+
+    We assume you have read the page :ref:`install icefall` and have set up
+    the environment for ``icefall``.
+
+
+Data preparation
+----------------
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ ./prepare.sh
+
+The script ``./prepare.sh`` handles the data preparation for you, **automagically**.
+All you need to do is to run it.
+
+The data preparation contains several stages. You can use the following two
+options:
+
+  - ``--stage``
+  - ``--stop-stage``
+
+to control which stage(s) should be run. By default, all stages are executed.
+
+
+For example,
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ ./prepare.sh --stage 0 --stop-stage 0
+
+means to run only stage 0.
+
+To run stage 2 to stage 5, use:
+
+.. code-block:: bash
+
+    $ ./prepare.sh --stage 2 --stop-stage 5
+
+
+Training
+--------
+
+We now describe the training of the TDNN-LiGRU-CTC model, contained in
+the `tdnn_ligru_ctc <https://github.com/k2-fsa/icefall/tree/master/egs/timit/ASR/tdnn_ligru_ctc>`_
+folder.
+
+.. HINT::
+
+    TIMIT is a very small dataset, so one GPU is enough.
+
+The command to run the training part is:
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ export CUDA_VISIBLE_DEVICES="0"
+    $ ./tdnn_ligru_ctc/train.py
+
+By default, it will run ``25`` epochs. Training logs and checkpoints are saved
+in ``tdnn_ligru_ctc/exp``.
+
+In ``tdnn_ligru_ctc/exp``, you will find the following files:
+
+  - ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-25.pt``
+
+    These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``.
+    To resume training from some checkpoint, say ``epoch-10.pt``, you can use:
+
+      .. code-block:: bash
+
+        $ ./tdnn_ligru_ctc/train.py --start-epoch 11
+
+  - ``tensorboard/``
+
+    This folder contains TensorBoard logs. Training loss, validation loss, learning
+    rate, etc, are recorded in these logs. You can visualize them by:
+
+      .. code-block:: bash
+
+        $ cd tdnn_ligru_ctc/exp/tensorboard
+        $ tensorboard dev upload --logdir . --description "TDNN ligru training for timit with icefall"
+
+  - ``log/log-train-xxxx``
+
+    It is the detailed training log in text format, the same as the one
+    printed to the console during training.
+
+
+To see available training options, you can use:
+
+.. code-block:: bash
+
+    $ ./tdnn_ligru_ctc/train.py --help
+
+Other training options, e.g., learning rate, results dir, etc., are
+pre-configured in the function ``get_params()``
+in `tdnn_ligru_ctc/train.py <https://github.com/k2-fsa/icefall/blob/master/egs/timit/ASR/tdnn_ligru_ctc/train.py>`_.
+Normally, you don't need to change them; if you do, change them by
+modifying the code.
+
+Decoding
+--------
+
+The decoding part uses checkpoints saved by the training part, so you have
+to run the training part first.
+
+The command for decoding is:
+
+.. code-block:: bash
+
+    $ export CUDA_VISIBLE_DEVICES="0"
+    $ ./tdnn_ligru_ctc/decode.py
+
+You will see the WER in the output log.
+
+Decoded results are saved in ``tdnn_ligru_ctc/exp``.
+
+.. code-block:: bash
+
+    $ ./tdnn_ligru_ctc/decode.py --help
+
+shows you the available decoding options.
+
+Some commonly used options are:
+
+  - ``--epoch``
+
+    You can select which checkpoint to use for decoding.
+    For instance, ``./tdnn_ligru_ctc/decode.py --epoch 10`` means to use
+    ``./tdnn_ligru_ctc/exp/epoch-10.pt`` for decoding.
+
+  - ``--avg``
+
+    It is related to model averaging. It specifies the number of checkpoints
+    to be averaged. The averaged model is used for decoding.
+    For example, the following command:
+
+      .. code-block:: bash
+
+        $ ./tdnn_ligru_ctc/decode.py --epoch 25 --avg 17
+
+    uses the average of ``epoch-9.pt``, ``epoch-10.pt``, ``epoch-11.pt``,
+    ``epoch-12.pt``, ``epoch-13.pt``, ``epoch-14.pt``, ``epoch-15.pt``,
+    ``epoch-16.pt``, ``epoch-17.pt``, ``epoch-18.pt``, ``epoch-19.pt``,
+    ``epoch-20.pt``, ``epoch-21.pt``, ``epoch-22.pt``, ``epoch-23.pt``,
+    ``epoch-24.pt`` and ``epoch-25.pt``
+    for decoding.
+
+  - ``--export``
+
+    If it is ``True``, i.e., ``./tdnn_ligru_ctc/decode.py --export 1``, the code
+    will save the averaged model to ``tdnn_ligru_ctc/exp/pretrained.pt``.
+    See :ref:`tdnn_ligru_ctc use a pre-trained model` for how to use it
+    (a combined example follows this list).
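+Putting these options together: the following illustrative invocation decodes
+with a 17-checkpoint average and also exports the averaged model, which is how
+the pre-trained model below was produced:
+
+.. code-block:: bash
+
+    $ ./tdnn_ligru_ctc/decode.py --epoch 25 --avg 17 --export 1
+    $ ls -lh tdnn_ligru_ctc/exp/pretrained.pt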
+
+
+.. _tdnn_ligru_ctc use a pre-trained model:
+
+Pre-trained Model
+-----------------
+
+We have uploaded the pre-trained model to
+`<https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_ligru_ctc>`_.
+
+The following shows you how to use the pre-trained model.
+
+
+Install kaldifeat
+~~~~~~~~~~~~~~~~~
+
+`kaldifeat <https://github.com/csukuangfj/kaldifeat>`_ is used to
+extract features for a single sound file or multiple sound files
+at the same time.
+
+Please refer to `<https://github.com/csukuangfj/kaldifeat>`_ for installation.
+
+Download the pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ mkdir tmp-ligru
+    $ cd tmp-ligru
+    $ git lfs install
+    $ git clone https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_ligru_ctc
+
+.. CAUTION::
+
+    You have to use ``git lfs`` to download the pre-trained model.
+
+.. CAUTION::
+
+    In order to use this pre-trained model, your k2 version has to be v1.7 or later.
+
+After downloading, you will have the following files:
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ tree tmp-ligru
+
+.. code-block:: bash
+
+    tmp-ligru/
+    `-- icefall_asr_timit_tdnn_ligru_ctc
+        |-- README.md
+        |-- data
+        |   |-- lang_phone
+        |   |   |-- HLG.pt
+        |   |   |-- tokens.txt
+        |   |   `-- words.txt
+        |   `-- lm
+        |       `-- G_4_gram.pt
+        |-- exp
+        |   `-- pretrained_average_9_25.pt
+        `-- test_waves
+            |-- FDHC0_SI1559.WAV
+            |-- FELC0_SI756.WAV
+            |-- FMGD0_SI1564.WAV
+            `-- trans.txt
+
+    6 directories, 10 files
+
+**File descriptions**:
+
+  - ``data/lang_phone/HLG.pt``
+
+    It is the decoding graph (see the sketch after this list for how to
+    peek at it).
+
+  - ``data/lang_phone/tokens.txt``
+
+    It contains tokens and their IDs.
+
+  - ``data/lang_phone/words.txt``
+
+    It contains words and their IDs.
+
+  - ``data/lm/G_4_gram.pt``
+
+    It is a 4-gram LM, useful for LM rescoring.
+
+  - ``exp/pretrained_average_9_25.pt``
+
+    It contains pre-trained model parameters, obtained by averaging
+    checkpoints from ``epoch-9.pt`` to ``epoch-25.pt``.
+    Note: We have removed optimizer ``state_dict`` to reduce file size.
+
+  - ``test_waves/*.WAV``
+
+    It contains some test sound files from the TIMIT ``TEST`` dataset.
+
+  - ``test_waves/trans.txt``
+
+    It contains the reference transcripts for the sound files in ``test_waves/``.
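+If you are curious what the decoding graph looks like, a small sketch like
+the following should work, assuming ``HLG.pt`` stores the output of
+``k2.Fsa.as_dict()`` (which is how these recipes typically save it):
+
+.. code-block:: bash
+
+    $ python3 - <<'EOF'
+    import torch
+    import k2
+
+    # Assumption: HLG.pt is a dict produced by k2.Fsa.as_dict().
+    d = torch.load(
+        "tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt",
+        map_location="cpu",
+    )
+    HLG = k2.Fsa.from_dict(d)
+    print(HLG.shape)  # (num_states, None) for a single FSA
+    EOF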
+The information of the test sound files is listed below:
+
+.. code-block:: bash
+
+    $ ffprobe -show_format tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV
+
+    Input #0, nistsphere, from 'tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV':
+      Metadata:
+        database_id     : TIMIT
+        database_version: 1.0
+        utterance_id    : dhc0_si1559
+        sample_min      : -4176
+        sample_max      : 5984
+      Duration: 00:00:03.40, bitrate: 258 kb/s
+        Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+    $ ffprobe -show_format tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV
+
+    Input #0, nistsphere, from 'tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV':
+      Metadata:
+        database_id     : TIMIT
+        database_version: 1.0
+        utterance_id    : elc0_si756
+        sample_min      : -1546
+        sample_max      : 1989
+      Duration: 00:00:04.19, bitrate: 257 kb/s
+        Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+    $ ffprobe -show_format tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV
+
+    Input #0, nistsphere, from 'tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV':
+      Metadata:
+        database_id     : TIMIT
+        database_version: 1.0
+        utterance_id    : mgd0_si1564
+        sample_min      : -7626
+        sample_max      : 10573
+      Duration: 00:00:04.44, bitrate: 257 kb/s
+        Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+
+Inference with a pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ ./tdnn_ligru_ctc/pretrained.py --help
+
+shows the usage information of ``./tdnn_ligru_ctc/pretrained.py``.
+
+To decode with ``1best`` method, we can use:
+
+.. code-block:: bash
+
+    ./tdnn_ligru_ctc/pretrained.py \
+      --method 1best \
+      --checkpoint ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/exp/pretrained_average_9_25.pt \
+      --words-file ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/words.txt \
+      --HLG ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt \
+      ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV \
+      ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV \
+      ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV
+
+The output is:
+
+.. code-block::
+
+    2021-11-08 20:41:33,660 INFO [pretrained.py:169] device: cuda:0
+    2021-11-08 20:41:33,660 INFO [pretrained.py:171] Creating model
+    2021-11-08 20:41:38,680 INFO [pretrained.py:183] Loading HLG from ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt
+    2021-11-08 20:41:38,695 INFO [pretrained.py:200] Constructing Fbank computer
+    2021-11-08 20:41:38,697 INFO [pretrained.py:210] Reading sound files: ['./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV']
+    2021-11-08 20:41:38,704 INFO [pretrained.py:216] Decoding started
+    2021-11-08 20:41:39,819 INFO [pretrained.py:246] Use HLG decoding
+    2021-11-08 20:41:39,829 INFO [pretrained.py:267]
+    ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV:
+    sil dh ih sh uw ah l iy v iy z ih sil p r aa sil k s ih m ey dx ih sil d w uh dx ih w ih s f iy l ih ng w ih th ih n ih m s eh l f sil jh
+
+    ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV:
+    sil m ih sil t ih r iy s sil s er r ih m ih sil m aa l ih sil k l ey sil r eh sil d w ay sil d aa r sil b ah f sil jh
+
+    ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV:
+    sil hh ah z sil b ih sil g r iy w ah z sil d aw n ih sil b ay s sil n ey sil w eh l f eh n s ih z eh n dh eh r w er sil g r ey z ih ng sil k ae dx l sil
+
+
+    2021-11-08 20:41:39,829 INFO [pretrained.py:269] Decoding Done
+
+
+To decode with ``whole-lattice-rescoring`` method, you can use:
+
+.. code-block:: bash
+
+    ./tdnn_ligru_ctc/pretrained.py \
+      --method whole-lattice-rescoring \
+      --checkpoint ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/exp/pretrained_average_9_25.pt \
+      --words-file ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/words.txt \
+      --HLG ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt \
+      --G ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lm/G_4_gram.pt \
+      --ngram-lm-scale 0.1 \
+      ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV \
+      ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV \
+      ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV
+
+The decoding output is:
+
+.. code-block::
+
+    2021-11-08 20:37:50,693 INFO [pretrained.py:169] device: cuda:0
+    2021-11-08 20:37:50,693 INFO [pretrained.py:171] Creating model
+    2021-11-08 20:37:54,693 INFO [pretrained.py:183] Loading HLG from ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt
+    2021-11-08 20:37:54,705 INFO [pretrained.py:191] Loading G from ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lm/G_4_gram.pt
+    2021-11-08 20:37:54,714 INFO [pretrained.py:200] Constructing Fbank computer
+    2021-11-08 20:37:54,715 INFO [pretrained.py:210] Reading sound files: ['./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV']
+    2021-11-08 20:37:54,720 INFO [pretrained.py:216] Decoding started
+    2021-11-08 20:37:55,808 INFO [pretrained.py:251] Use HLG decoding + LM rescoring
+    2021-11-08 20:37:56,348 INFO [pretrained.py:267]
+    ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV:
+    sil dh ih sh uw ah l iy v iy z ah sil p r aa sil k s ih m ey dx ih sil d w uh dx iy w ih s f iy l iy ng w ih th ih n ih m s eh l f sil jh
+
+    ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV:
+    sil m ih sil t ih r iy l s sil s er r eh m ih sil m aa l ih ng sil k l ey sil r eh sil d w ay sil d aa r sil b ah f sil jh ch
+
+    ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV:
+    sil hh ah z sil b ih n sil g r iy w ah z sil b aw n ih sil b ay s sil n ey sil w er l f eh n s ih z eh n dh eh r w er sil g r ey z ih ng sil k ae dx l sil
+
+
+    2021-11-08 20:37:56,348 INFO [pretrained.py:269] Decoding Done
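+The LM scale can have a noticeable effect on the rescored result. A quick,
+purely illustrative way to probe different values of ``--ngram-lm-scale`` is
+a shell loop (the values below are arbitrary):
+
+.. code-block:: bash
+
+    # Sweep a few LM scales on a single test file and compare the outputs.
+    for scale in 0.01 0.05 0.1 0.3; do
+      echo "--ngram-lm-scale $scale"
+      ./tdnn_ligru_ctc/pretrained.py \
+        --method whole-lattice-rescoring \
+        --checkpoint ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/exp/pretrained_average_9_25.pt \
+        --words-file ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/words.txt \
+        --HLG ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt \
+        --G ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lm/G_4_gram.pt \
+        --ngram-lm-scale $scale \
+        ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV
+    done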
+
+
+Colab notebook
+--------------
+
+We provide a colab notebook for decoding with a pre-trained model.
+
+|timit tdnn_ligru_ctc colab notebook|
+
+.. |timit tdnn_ligru_ctc colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/11IT-k4HQIgQngXz1uvWsEYktjqQt7Tmb
+
+
+**Congratulations!** You have finished the TDNN-LiGRU-CTC recipe on TIMIT in ``icefall``.
diff --git a/_sources/recipes/timit/tdnn_lstm_ctc.rst.txt b/_sources/recipes/timit/tdnn_lstm_ctc.rst.txt
new file mode 100644
index 000000000..6f760a9ce
--- /dev/null
+++ b/_sources/recipes/timit/tdnn_lstm_ctc.rst.txt
@@ -0,0 +1,404 @@
+TDNN-LSTM-CTC
+=============
+
+This tutorial shows you how to run a TDNN-LSTM-CTC model with the TIMIT dataset.
+
+
+.. HINT::
+
+    We assume you have read the page :ref:`install icefall` and have set up
+    the environment for ``icefall``.
+
+
+Data preparation
+----------------
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ ./prepare.sh
+
+The script ``./prepare.sh`` handles the data preparation for you, **automagically**.
+All you need to do is to run it.
+
+The data preparation contains several stages. You can use the following two
+options:
+
+  - ``--stage``
+  - ``--stop-stage``
+
+to control which stage(s) should be run. By default, all stages are executed.
+
+
+For example,
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ ./prepare.sh --stage 0 --stop-stage 0
+
+means to run only stage 0.
+
+To run stage 2 to stage 5, use:
+
+.. code-block:: bash
+
+    $ ./prepare.sh --stage 2 --stop-stage 5
+
+
+Training
+--------
+
+We now describe the training of the TDNN-LSTM-CTC model, contained in
+the `tdnn_lstm_ctc <https://github.com/k2-fsa/icefall/tree/master/egs/timit/ASR/tdnn_lstm_ctc>`_
+folder.
+
+.. HINT::
+
+    TIMIT is a very small dataset, so one GPU is enough for training.
+
+The command to run the training part is:
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ export CUDA_VISIBLE_DEVICES="0"
+    $ ./tdnn_lstm_ctc/train.py
+
+By default, it will run ``25`` epochs. Training logs and checkpoints are saved
+in ``tdnn_lstm_ctc/exp``.
+
+In ``tdnn_lstm_ctc/exp``, you will find the following files:
+
+  - ``epoch-0.pt``, ``epoch-1.pt``, ..., ``epoch-25.pt``
+
+    These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``.
+    To resume training from some checkpoint, say ``epoch-10.pt``, you can use:
+
+      .. code-block:: bash
+
+        $ ./tdnn_lstm_ctc/train.py --start-epoch 11
+
+  - ``tensorboard/``
+
+    This folder contains TensorBoard logs. Training loss, validation loss, learning
+    rate, etc, are recorded in these logs. You can visualize them by:
+
+      .. code-block:: bash
+
+        $ cd tdnn_lstm_ctc/exp/tensorboard
+        $ tensorboard dev upload --logdir . --description "TDNN LSTM training for timit with icefall"
+
+  - ``log/log-train-xxxx``
+
+    It is the detailed training log in text format, the same as the one
+    printed to the console during training.
+
+
+To see available training options, you can use:
+
+.. code-block:: bash
+
+    $ ./tdnn_lstm_ctc/train.py --help
+
+Other training options, e.g., learning rate, results dir, etc., are
+pre-configured in the function ``get_params()``
+in `tdnn_lstm_ctc/train.py <https://github.com/k2-fsa/icefall/blob/master/egs/timit/ASR/tdnn_lstm_ctc/train.py>`_.
+Normally, you don't need to change them; if you do, change them by
+modifying the code.
+
+Decoding
+--------
+
+The decoding part uses checkpoints saved by the training part, so you have
+to run the training part first.
+
+The command for decoding is:
+
+.. code-block:: bash
+
+    $ export CUDA_VISIBLE_DEVICES="0"
+    $ ./tdnn_lstm_ctc/decode.py
+
+You will see the WER in the output log.
+
+Decoded results are saved in ``tdnn_lstm_ctc/exp``.
+
+.. code-block:: bash
+
+    $ ./tdnn_lstm_ctc/decode.py --help
+
+shows you the available decoding options.
+
+Some commonly used options are:
+
+  - ``--epoch``
+
+    You can select which checkpoint to use for decoding.
+    For instance, ``./tdnn_lstm_ctc/decode.py --epoch 10`` means to use
+    ``./tdnn_lstm_ctc/exp/epoch-10.pt`` for decoding.
+
+  - ``--avg``
+
+    It is related to model averaging. It specifies the number of checkpoints
+    to be averaged. The averaged model is used for decoding.
+    For example, the following command:
+
+      .. code-block:: bash
+
+        $ ./tdnn_lstm_ctc/decode.py --epoch 25 --avg 10
+
+    uses the average of ``epoch-16.pt``, ``epoch-17.pt``, ``epoch-18.pt``,
+    ``epoch-19.pt``, ``epoch-20.pt``, ``epoch-21.pt``, ``epoch-22.pt``,
+    ``epoch-23.pt``, ``epoch-24.pt`` and ``epoch-25.pt``
+    for decoding.
+
+  - ``--export``
+
+    If it is ``True``, i.e., ``./tdnn_lstm_ctc/decode.py --export 1``, the code
+    will save the averaged model to ``tdnn_lstm_ctc/exp/pretrained.pt``.
+    See :ref:`tdnn_lstm_ctc use a pre-trained model` for how to use it.
+
+
+.. _tdnn_lstm_ctc use a pre-trained model:
+
+Pre-trained Model
+-----------------
+
+We have uploaded the pre-trained model to
+`<https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_lstm_ctc>`_.
+
+The following shows you how to use the pre-trained model.
+
+
+Install kaldifeat
+~~~~~~~~~~~~~~~~~
+
+`kaldifeat <https://github.com/csukuangfj/kaldifeat>`_ is used to
+extract features for a single sound file or multiple sound files
+at the same time.
+
+Please refer to `<https://github.com/csukuangfj/kaldifeat>`_ for installation.
+
+Download the pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ mkdir tmp-lstm
+    $ cd tmp-lstm
+    $ git lfs install
+    $ git clone https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_lstm_ctc
+
+.. CAUTION::
+
+    You have to use ``git lfs`` to download the pre-trained model.
+
+.. CAUTION::
+
+    In order to use this pre-trained model, your k2 version has to be v1.7 or later.
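+If the clone finished suspiciously fast, it is worth checking that ``git lfs``
+actually fetched the large files. An unfetched LFS pointer is a tiny text stub
+rather than a multi-megabyte binary:
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    # A real LM/model file is several megabytes; an unfetched LFS pointer
+    # is a ~100-byte text file starting with
+    # "version https://git-lfs.github.com/spec/v1".
+    $ ls -lh tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lm/G_4_gram.pt
+    $ head -c 60 tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lm/G_4_gram.pt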
+
+After downloading, you will have the following files:
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ tree tmp-lstm
+
+.. code-block:: bash
+
+    tmp-lstm/
+    `-- icefall_asr_timit_tdnn_lstm_ctc
+        |-- README.md
+        |-- data
+        |   |-- lang_phone
+        |   |   |-- HLG.pt
+        |   |   |-- tokens.txt
+        |   |   `-- words.txt
+        |   `-- lm
+        |       `-- G_4_gram.pt
+        |-- exp
+        |   `-- pretrained_average_16_25.pt
+        `-- test_waves
+            |-- FDHC0_SI1559.WAV
+            |-- FELC0_SI756.WAV
+            |-- FMGD0_SI1564.WAV
+            `-- trans.txt
+
+    6 directories, 10 files
+
+**File descriptions**:
+
+  - ``data/lang_phone/HLG.pt``
+
+    It is the decoding graph.
+
+  - ``data/lang_phone/tokens.txt``
+
+    It contains tokens and their IDs.
+
+  - ``data/lang_phone/words.txt``
+
+    It contains words and their IDs.
+
+  - ``data/lm/G_4_gram.pt``
+
+    It is a 4-gram LM, useful for LM rescoring.
+
+  - ``exp/pretrained_average_16_25.pt``
+
+    It contains pre-trained model parameters, obtained by averaging
+    checkpoints from ``epoch-16.pt`` to ``epoch-25.pt``.
+    Note: We have removed optimizer ``state_dict`` to reduce file size.
+
+  - ``test_waves/*.WAV``
+
+    It contains some test sound files from the TIMIT ``TEST`` dataset.
+
+  - ``test_waves/trans.txt``
+
+    It contains the reference transcripts for the sound files in ``test_waves/``.
+
+The information of the test sound files is listed below:
+
+.. code-block:: bash
+
+    $ ffprobe -show_format tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV
+
+    Input #0, nistsphere, from 'tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV':
+      Metadata:
+        database_id     : TIMIT
+        database_version: 1.0
+        utterance_id    : dhc0_si1559
+        sample_min      : -4176
+        sample_max      : 5984
+      Duration: 00:00:03.40, bitrate: 258 kb/s
+        Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+    $ ffprobe -show_format tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV
+
+    Input #0, nistsphere, from 'tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV':
+      Metadata:
+        database_id     : TIMIT
+        database_version: 1.0
+        utterance_id    : elc0_si756
+        sample_min      : -1546
+        sample_max      : 1989
+      Duration: 00:00:04.19, bitrate: 257 kb/s
+        Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+    $ ffprobe -show_format tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV
+
+    Input #0, nistsphere, from 'tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV':
+      Metadata:
+        database_id     : TIMIT
+        database_version: 1.0
+        utterance_id    : mgd0_si1564
+        sample_min      : -7626
+        sample_max      : 10573
+      Duration: 00:00:04.44, bitrate: 257 kb/s
+        Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+
+Inference with a pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    $ cd egs/timit/ASR
+    $ ./tdnn_lstm_ctc/pretrained.py --help
+
+shows the usage information of ``./tdnn_lstm_ctc/pretrained.py``.
+
+To decode with ``1best`` method, we can use:
+
+.. code-block:: bash
+
+    ./tdnn_lstm_ctc/pretrained.py \
+      --method 1best \
+      --checkpoint ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/exp/pretrained_average_16_25.pt \
+      --words-file ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/words.txt \
+      --HLG ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt \
+      ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV \
+      ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV \
+      ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV
+
+The output is:
+
+.. code-block::
+
+    2021-11-08 21:02:49,583 INFO [pretrained.py:169] device: cuda:0
+    2021-11-08 21:02:49,584 INFO [pretrained.py:171] Creating model
+    2021-11-08 21:02:53,816 INFO [pretrained.py:183] Loading HLG from ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt
+    2021-11-08 21:02:53,827 INFO [pretrained.py:200] Constructing Fbank computer
+    2021-11-08 21:02:53,827 INFO [pretrained.py:210] Reading sound files: ['./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV']
+    2021-11-08 21:02:53,831 INFO [pretrained.py:216] Decoding started
+    2021-11-08 21:02:54,380 INFO [pretrained.py:246] Use HLG decoding
+    2021-11-08 21:02:54,387 INFO [pretrained.py:267]
+    ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV:
+    sil dh ih sh uw ah l iy v iy z ih sil p r aa sil k s ih m ey dx ih sil d w uh dx iy w ih s f iy l iy w ih th ih n ih m s eh l f sil jh
+
+    ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV:
+    sil dh ih sil t ih r ih s sil s er r ih m ih sil m aa l ih ng sil k l ey sil r eh sil d w ay sil d aa r sil b ah f sil jh
+
+    ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV:
+    sil hh ae z sil b ih n iy w ah z sil b ae n ih sil b ay s sil n ey sil k eh l f eh n s ih z eh n dh eh r w er sil g r ey z ih ng sil k ae dx l sil
+
+
+    2021-11-08 21:02:54,387 INFO [pretrained.py:269] Decoding Done
+
+
+To decode with ``whole-lattice-rescoring`` method, you can use:
+
+.. code-block:: bash
+
+    ./tdnn_lstm_ctc/pretrained.py \
+      --method whole-lattice-rescoring \
+      --checkpoint ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/exp/pretrained_average_16_25.pt \
+      --words-file ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/words.txt \
+      --HLG ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt \
+      --G ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lm/G_4_gram.pt \
+      --ngram-lm-scale 0.08 \
+      ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV \
+      ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV \
+      ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV
+
+The decoding output is:
+
+.. code-block::
+
+    2021-11-08 20:05:22,739 INFO [pretrained.py:169] device: cuda:0
+    2021-11-08 20:05:22,739 INFO [pretrained.py:171] Creating model
+    2021-11-08 20:05:26,959 INFO [pretrained.py:183] Loading HLG from ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt
+    2021-11-08 20:05:26,971 INFO [pretrained.py:191] Loading G from ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lm/G_4_gram.pt
+    2021-11-08 20:05:26,977 INFO [pretrained.py:200] Constructing Fbank computer
+    2021-11-08 20:05:26,978 INFO [pretrained.py:210] Reading sound files: ['./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV']
+    2021-11-08 20:05:26,981 INFO [pretrained.py:216] Decoding started
+    2021-11-08 20:05:27,519 INFO [pretrained.py:251] Use HLG decoding + LM rescoring
+    2021-11-08 20:05:27,878 INFO [pretrained.py:267]
+    ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV:
+    sil dh ih sh uw l iy v iy z ih sil p r aa sil k s ah m ey dx ih sil w uh dx iy w ih s f iy l ih ng w ih th ih n ih m s eh l f sil jh
+
+    ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV:
+    sil dh ih sil t ih r iy ih s sil s er r eh m ih sil n ah l ih ng sil k l ey sil r eh sil d w ay sil d aa r sil b ow f sil jh
+
+    ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV:
+    sil hh ah z sil b ih n iy w ah z sil b ae n ih sil b ay s sil n ey sil k ih l f eh n s ih z eh n dh eh r w er sil g r ey z ih n sil k ae dx l sil
+
+
+    2021-11-08 20:05:27,878 INFO [pretrained.py:269] Decoding Done
+
+
+Colab notebook
+--------------
+
+We provide a colab notebook for decoding with a pre-trained model.
+
+|timit tdnn_lstm_ctc colab notebook|
+
+.. |timit tdnn_lstm_ctc colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/drive/1Hs9DA4V96uapw_30uNp32OMJgkuR5VVd
+
+
+**Congratulations!** You have finished the TDNN-LSTM-CTC recipe on TIMIT in ``icefall``.
diff --git a/_sources/recipes/yesno/index.rst.txt b/_sources/recipes/yesno/index.rst.txt
new file mode 100644
index 000000000..d68523a97
--- /dev/null
+++ b/_sources/recipes/yesno/index.rst.txt
@@ -0,0 +1,7 @@
+YesNo
+=====
+
+.. toctree::
+   :maxdepth: 1
+
+   tdnn
diff --git a/_sources/recipes/yesno/tdnn.rst.txt b/_sources/recipes/yesno/tdnn.rst.txt
new file mode 100644
index 000000000..e8b748e6b
--- /dev/null
+++ b/_sources/recipes/yesno/tdnn.rst.txt
@@ -0,0 +1,445 @@
+TDNN-CTC
+========
+
+This page shows you how to run the `yesno <https://www.openslr.org/1>`_ recipe.
+It contains:
+
+  - (1) Prepare data for training
+  - (2) Train a TDNN model
+
+    - (a) View text format logs and visualize TensorBoard logs
+    - (b) Select device type, i.e., CPU or GPU, for training
+    - (c) Change training options
+    - (d) Resume training from a checkpoint
+
+  - (3) Decode with a trained model
+
+    - (a) Select a checkpoint for decoding
+    - (b) Model averaging
+
+  - (4) Colab notebook
+
+    - (a) It shows you step by step how to set up the environment, how to do training,
+      and how to do decoding
+    - (b) How to use a pre-trained model
+
+  - (5) Inference with a pre-trained model
+
+    - (a) Download a pre-trained model, provided by us
+    - (b) Decode a single sound file with a pre-trained model
+    - (c) Decode multiple sound files at the same time
+
+It does **NOT** show you:
+
+  - (1) How to train with multiple GPUs
+
+    The ``yesno`` dataset is so small that CPU is more than enough
+    for training as well as for decoding.
+
+  - (2) How to use LM rescoring for decoding
+
+    The dataset does not have an LM for rescoring.
+
+.. HINT::
+
+    We assume you have read the page :ref:`install icefall` and have set up
+    the environment for ``icefall``.
+
+.. HINT::
+
+    You **don't** need a **GPU** to run this recipe. It can be run on a **CPU**.
+    The training part takes less than 30 **seconds** on a CPU and you will get
+    the following WER at the end::
+
+        [test_set] %WER 0.42% [1 / 240, 0 ins, 1 del, 0 sub ]
+
+Data preparation
+----------------
+
+.. code-block:: bash
+
+    $ cd egs/yesno/ASR
+    $ ./prepare.sh
+
+The script ``./prepare.sh`` handles the data preparation for you, **automagically**.
+All you need to do is to run it.
+
+The data preparation contains several stages. You can use the following two
+options:
+
+  - ``--stage``
+  - ``--stop-stage``
+
+to control which stage(s) should be run. By default, all stages are executed.
+
+
+For example,
+
+.. code-block:: bash
+
+    $ cd egs/yesno/ASR
+    $ ./prepare.sh --stage 0 --stop-stage 0
+
+means to run only stage 0.
+
+To run stage 2 to stage 5, use:
+
+.. code-block:: bash
+
+    $ ./prepare.sh --stage 2 --stop-stage 5
+
+
+Training
+--------
+
+For ``yesno``, we provide only a TDNN model, contained in
+the `tdnn <https://github.com/k2-fsa/icefall/tree/master/egs/yesno/ASR/tdnn>`_
+folder.
+
+The command to run the training part is:
+
+.. code-block:: bash
+
+    $ cd egs/yesno/ASR
+    $ export CUDA_VISIBLE_DEVICES=""
+    $ ./tdnn/train.py
+
+By default, it will run ``15`` epochs. Training logs and checkpoints are saved
+in ``tdnn/exp``.
+
+In ``tdnn/exp``, you will find the following files:
+
+  - ``epoch-0.pt``, ``epoch-1.pt``, ...
+
+    These are checkpoint files, containing model ``state_dict`` and optimizer ``state_dict``.
+    To resume training from some checkpoint, say ``epoch-10.pt``, you can use:
+
+      .. code-block:: bash
+
+        $ ./tdnn/train.py --start-epoch 11
+
+  - ``tensorboard/``
+
+    This folder contains TensorBoard logs. Training loss, validation loss, learning
+    rate, etc, are recorded in these logs. You can visualize them by:
+
+      .. code-block:: bash
+
+        $ cd tdnn/exp/tensorboard
+        $ tensorboard dev upload --logdir . --description "TDNN training for yesno with icefall"
+
+    It will print something like below:
+
+      .. code-block::
+
+        TensorFlow installation not found - running with reduced feature set.
+        Upload started and will continue reading any new data as it's added to the logdir.
+
+        To stop uploading, press Ctrl-C.
+
+        New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/yKUbhb5wRmOSXYkId1z9eg/
+
+        [2021-08-23T23:49:41] Started scanning logdir.
+        [2021-08-23T23:49:42] Total uploaded: 135 scalars, 0 tensors, 0 binary objects
+        Listening for new data in logdir...
+
+    Note there is a URL in the above output, click it and you will see
+    the following screenshot:
+
+      .. figure:: images/tdnn-tensorboard-log.png
+        :width: 600
+        :alt: TensorBoard screenshot
+        :align: center
+        :target: https://tensorboard.dev/experiment/yKUbhb5wRmOSXYkId1z9eg/
+
+        TensorBoard screenshot.
+
+  - ``log/log-train-xxxx``
+
+    It is the detailed training log in text format, the same as the one
+    printed to the console during training.
+
+
+
+.. NOTE::
+
+    By default, ``./tdnn/train.py`` uses GPU 0 for training if GPUs are available.
+    If you have two GPUs, say, GPU 0 and GPU 1, and you want to use GPU 1 for
+    training, you can run:
+
+      .. code-block:: bash
+
+        $ export CUDA_VISIBLE_DEVICES="1"
+        $ ./tdnn/train.py
+
+    Since the ``yesno`` dataset is very small, containing only 30 sound files
+    for training, and the model in use is also very small, we use:
+
+      .. code-block:: bash
+
+        $ export CUDA_VISIBLE_DEVICES=""
+
+    so that ``./tdnn/train.py`` uses CPU during training.
+
+    If you don't have GPUs, then you don't need to
+    run ``export CUDA_VISIBLE_DEVICES=""``.
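+If you are unsure whether PyTorch can see a GPU on your machine at all, a
+one-liner check (nothing icefall-specific) is:
+
+.. code-block:: bash
+
+    $ python3 -c "import torch; print(torch.cuda.is_available(), torch.cuda.device_count())"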
+
+To see available training options, you can use:
+
+.. code-block:: bash
+
+    $ ./tdnn/train.py --help
+
+Other training options, e.g., learning rate, results dir, etc., are
+pre-configured in the function ``get_params()``
+in `tdnn/train.py <https://github.com/k2-fsa/icefall/blob/master/egs/yesno/ASR/tdnn/train.py>`_.
+Normally, you don't need to change them; if you do, change them by
+modifying the code.
+
+Decoding
+--------
+
+The decoding part uses checkpoints saved by the training part, so you have
+to run the training part first.
+
+The command for decoding is:
+
+.. code-block:: bash
+
+    $ export CUDA_VISIBLE_DEVICES=""
+    $ ./tdnn/decode.py
+
+You will see the WER in the output log.
+
+Decoded results are saved in ``tdnn/exp``.
+
+.. code-block:: bash
+
+    $ ./tdnn/decode.py --help
+
+shows you the available decoding options.
+
+Some commonly used options are:
+
+  - ``--epoch``
+
+    You can select which checkpoint to use for decoding.
+    For instance, ``./tdnn/decode.py --epoch 10`` means to use
+    ``./tdnn/exp/epoch-10.pt`` for decoding.
+
+  - ``--avg``
+
+    It is related to model averaging. It specifies the number of checkpoints
+    to be averaged. The averaged model is used for decoding.
+    For example, the following command:
+
+      .. code-block:: bash
+
+        $ ./tdnn/decode.py --epoch 10 --avg 3
+
+    uses the average of ``epoch-8.pt``, ``epoch-9.pt`` and ``epoch-10.pt``
+    for decoding.
+
+  - ``--export``
+
+    If it is ``True``, i.e., ``./tdnn/decode.py --export 1``, the code
+    will save the averaged model to ``tdnn/exp/pretrained.pt``.
+    See :ref:`yesno use a pre-trained model` for how to use it.
+
+
+.. _yesno use a pre-trained model:
+
+Pre-trained Model
+-----------------
+
+We have uploaded the pre-trained model to
+`<https://huggingface.co/csukuangfj/icefall_asr_yesno_tdnn>`_.
+
+The following shows you how to use the pre-trained model.
+
+Download the pre-trained model
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    $ cd egs/yesno/ASR
+    $ mkdir tmp
+    $ cd tmp
+    $ git lfs install
+    $ git clone https://huggingface.co/csukuangfj/icefall_asr_yesno_tdnn
+
+.. CAUTION::
+
+    You have to use ``git lfs`` to download the pre-trained model.
+
+After downloading, you will have the following files:
+
+.. code-block:: bash
+
+    $ cd egs/yesno/ASR
+    $ tree tmp
+
+.. 
code-block:: bash + + tmp/ + `-- icefall_asr_yesno_tdnn + |-- README.md + |-- lang_phone + | |-- HLG.pt + | |-- L.pt + | |-- L_disambig.pt + | |-- Linv.pt + | |-- lexicon.txt + | |-- lexicon_disambig.txt + | |-- tokens.txt + | `-- words.txt + |-- lm + | |-- G.arpa + | `-- G.fst.txt + |-- pretrained.pt + `-- test_waves + |-- 0_0_0_1_0_0_0_1.wav + |-- 0_0_1_0_0_0_1_0.wav + |-- 0_0_1_0_0_1_1_1.wav + |-- 0_0_1_0_1_0_0_1.wav + |-- 0_0_1_1_0_0_0_1.wav + |-- 0_0_1_1_0_1_1_0.wav + |-- 0_0_1_1_1_0_0_0.wav + |-- 0_0_1_1_1_1_0_0.wav + |-- 0_1_0_0_0_1_0_0.wav + |-- 0_1_0_0_1_0_1_0.wav + |-- 0_1_0_1_0_0_0_0.wav + |-- 0_1_0_1_1_1_0_0.wav + |-- 0_1_1_0_0_1_1_1.wav + |-- 0_1_1_1_0_0_1_0.wav + |-- 0_1_1_1_1_0_1_0.wav + |-- 1_0_0_0_0_0_0_0.wav + |-- 1_0_0_0_0_0_1_1.wav + |-- 1_0_0_1_0_1_1_1.wav + |-- 1_0_1_1_0_1_1_1.wav + |-- 1_0_1_1_1_1_0_1.wav + |-- 1_1_0_0_0_1_1_1.wav + |-- 1_1_0_0_1_0_1_1.wav + |-- 1_1_0_1_0_1_0_0.wav + |-- 1_1_0_1_1_0_0_1.wav + |-- 1_1_0_1_1_1_1_0.wav + |-- 1_1_1_0_0_1_0_1.wav + |-- 1_1_1_0_1_0_1_0.wav + |-- 1_1_1_1_0_0_1_0.wav + |-- 1_1_1_1_1_0_0_0.wav + `-- 1_1_1_1_1_1_1_1.wav + + 4 directories, 42 files + +.. code-block:: bash + + $ soxi tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav + + Input File : 'tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav' + Channels : 1 + Sample Rate : 8000 + Precision : 16-bit + Duration : 00:00:06.76 = 54080 samples ~ 507 CDDA sectors + File Size : 108k + Bit Rate : 128k + Sample Encoding: 16-bit Signed Integer PCM + +- ``0_0_1_0_1_0_0_1.wav`` + + 0 means No; 1 means Yes. No and Yes are not in English, + but in `Hebrew `_. + So this file contains ``NO NO YES NO YES NO NO YES``. + +Download kaldifeat +~~~~~~~~~~~~~~~~~~ + +`kaldifeat `_ is used for extracting +features from a single or multiple sound files. Please refer to +``_ to install ``kaldifeat`` first. + +Inference with a pre-trained model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: bash + + $ cd egs/yesno/ASR + $ ./tdnn/pretrained.py --help + +shows the usage information of ``./tdnn/pretrained.py``. + +To decode a single file, we can use: + +.. code-block:: bash + + ./tdnn/pretrained.py \ + --checkpoint ./tmp/icefall_asr_yesno_tdnn/pretrained.pt \ + --words-file ./tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt \ + --HLG ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt \ + ./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav + +The output is: + +.. 
code-block:: + + 2021-08-24 12:22:51,621 INFO [pretrained.py:119] {'feature_dim': 23, 'num_classes': 4, 'sample_rate': 8000, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './tmp/icefall_asr_yesno_tdnn/pretrained.pt', 'words_file': './tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt', 'HLG': './tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt', 'sound_files': ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav']} + 2021-08-24 12:22:51,645 INFO [pretrained.py:125] device: cpu + 2021-08-24 12:22:51,645 INFO [pretrained.py:127] Creating model + 2021-08-24 12:22:51,650 INFO [pretrained.py:139] Loading HLG from ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt + 2021-08-24 12:22:51,651 INFO [pretrained.py:143] Constructing Fbank computer + 2021-08-24 12:22:51,652 INFO [pretrained.py:153] Reading sound files: ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav'] + 2021-08-24 12:22:51,684 INFO [pretrained.py:159] Decoding started + 2021-08-24 12:22:51,708 INFO [pretrained.py:198] + ./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav: + NO NO YES NO YES NO NO YES + + + 2021-08-24 12:22:51,708 INFO [pretrained.py:200] Decoding Done + +You can see that for the sound file ``0_0_1_0_1_0_0_1.wav``, the decoding result is +``NO NO YES NO YES NO NO YES``. + +To decode **multiple** files at the same time, you can use + +.. code-block:: bash + + ./tdnn/pretrained.py \ + --checkpoint ./tmp/icefall_asr_yesno_tdnn/pretrained.pt \ + --words-file ./tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt \ + --HLG ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt \ + ./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav \ + ./tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav + +The decoding output is: + +.. code-block:: + + 2021-08-24 12:25:20,159 INFO [pretrained.py:119] {'feature_dim': 23, 'num_classes': 4, 'sample_rate': 8000, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './tmp/icefall_asr_yesno_tdnn/pretrained.pt', 'words_file': './tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt', 'HLG': './tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt', 'sound_files': ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav', './tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav']} + 2021-08-24 12:25:20,181 INFO [pretrained.py:125] device: cpu + 2021-08-24 12:25:20,181 INFO [pretrained.py:127] Creating model + 2021-08-24 12:25:20,185 INFO [pretrained.py:139] Loading HLG from ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt + 2021-08-24 12:25:20,186 INFO [pretrained.py:143] Constructing Fbank computer + 2021-08-24 12:25:20,187 INFO [pretrained.py:153] Reading sound files: ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav', + './tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav'] + 2021-08-24 12:25:20,213 INFO [pretrained.py:159] Decoding started + 2021-08-24 12:25:20,287 INFO [pretrained.py:198] + ./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav: + NO NO YES NO YES NO NO YES + + ./tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav: + YES NO YES YES NO YES YES YES + + 2021-08-24 12:25:20,287 INFO [pretrained.py:200] Decoding Done + +You can see again that it decodes correctly. + +Colab notebook +-------------- + +We do provide a colab notebook for this recipe. + +|yesno colab notebook| + +.. 
|yesno colab notebook| image:: https://colab.research.google.com/assets/colab-badge.svg + :target: https://colab.research.google.com/drive/1tIjjzaJc3IvGyKiMCDWO-TSnBgkcuN3B?usp=sharing + + +**Congratulations!** You have finished the simplest speech recognition recipe in ``icefall``. diff --git a/_static/README.md b/_static/README.md new file mode 100644 index 000000000..97c1e993c --- /dev/null +++ b/_static/README.md @@ -0,0 +1,4 @@ + +# Introduction + + is used to generate files in this directory. diff --git a/_static/_sphinx_javascript_frameworks_compat.js b/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 000000000..8549469dc --- /dev/null +++ b/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,134 @@ +/* + * _sphinx_javascript_frameworks_compat.js + * ~~~~~~~~~~ + * + * Compatability shim for jQuery and underscores.js. + * + * WILL BE REMOVED IN Sphinx 6.0 + * xref RemovedInSphinx60Warning + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 000000000..eeb0519a6 --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,899 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} +a.brackets:before, +span.brackets > a:before{ + content: "["; +} + +a.brackets:after, +span.brackets > a:after { + content: "]"; +} + + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 
0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} +dl.footnote > dt, +dl.citation > dt { + float: left; + margin-right: 0.5em; +} + +dl.footnote > dd, +dl.citation > dd { + margin-bottom: 0em; +} + +dl.footnote > dd:after, +dl.citation > dd:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} +dl.field-list > dt:after { + content: ":"; +} + + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 
15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + 
+@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/css/badge_only.css b/_static/css/badge_only.css new file mode 100644 index 000000000..e380325bc --- /dev/null +++ b/_static/css/badge_only.css @@ -0,0 +1 @@ +.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 
6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff b/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 000000000..6cb600001 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff2 b/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 000000000..7059e2314 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff b/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 000000000..f815f63f9 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff2 b/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 000000000..f2c76e5bd Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/_static/css/fonts/fontawesome-webfont.eot b/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 000000000..e9f60ca95 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/_static/css/fonts/fontawesome-webfont.svg b/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 000000000..855c845e5 --- /dev/null +++ b/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. +[2,671 lines of SVG glyph markup omitted: the XML tags were stripped during extraction and only the FontForge metadata comment above survives] diff --git a/_static/css/fonts/fontawesome-webfont.ttf b/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 000000000..35acda2fa Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/_static/css/fonts/fontawesome-webfont.woff b/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 000000000..400014a4b Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff differ diff --git
a/_static/css/fonts/fontawesome-webfont.woff2 b/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 000000000..4d13fc604 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/_static/css/fonts/lato-bold-italic.woff b/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 000000000..88ad05b9f Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff differ diff --git a/_static/css/fonts/lato-bold-italic.woff2 b/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 000000000..c4e3d804b Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/_static/css/fonts/lato-bold.woff b/_static/css/fonts/lato-bold.woff new file mode 100644 index 000000000..c6dff51f0 Binary files /dev/null and b/_static/css/fonts/lato-bold.woff differ diff --git a/_static/css/fonts/lato-bold.woff2 b/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 000000000..bb195043c Binary files /dev/null and b/_static/css/fonts/lato-bold.woff2 differ diff --git a/_static/css/fonts/lato-normal-italic.woff b/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 000000000..76114bc03 Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff differ diff --git a/_static/css/fonts/lato-normal-italic.woff2 b/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 000000000..3404f37e2 Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/_static/css/fonts/lato-normal.woff b/_static/css/fonts/lato-normal.woff new file mode 100644 index 000000000..ae1307ff5 Binary files /dev/null and b/_static/css/fonts/lato-normal.woff differ diff --git a/_static/css/fonts/lato-normal.woff2 b/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 000000000..3bf984332 Binary files /dev/null and b/_static/css/fonts/lato-normal.woff2 differ diff --git a/_static/css/theme.css b/_static/css/theme.css new file mode 100644 index 000000000..0d9ae7e1a --- /dev/null +++ b/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on 
a,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,.wy-nav-top a,.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li 
button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning 
.wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown 
.caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-elli
psis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li 
button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.f
a-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-trip
advisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:b
efore,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a 
.admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content 
table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content 
.fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download .btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s 
ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content 
.wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content 
.wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content 
.wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen 
and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 .3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group 
.wy-form-halves input[type=url],.wy-control-group .wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch 
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs li{display:inline-block}.wy-breadcrumbs li.wy-breadcrumbs-aside{float:right}.wy-breadcrumbs li a{display:inline-block;padding:5px}.wy-breadcrumbs li a:first-child{padding-left:0}.rst-content .wy-breadcrumbs li tt,.wy-breadcrumbs li .rst-content tt,.wy-breadcrumbs li code{padding:5px;border:none;background:none}.rst-content .wy-breadcrumbs li tt.literal,.wy-breadcrumbs li .rst-content tt.literal,.wy-breadcrumbs li code.literal{color:#404040}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical 
li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions 
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote 
tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.field-list>dt:after,html.writer-html5 .rst-content dl.footnote>dt:after{content:":"}html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.footnote>dt>span.brackets{margin-right:.5rem}html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{font-style:italic}html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.footnote>dd p,html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{font-size:inherit;line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content 
tt{font-weight:700;color:#404040}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.field-list)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.field-list)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) 
.optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel{border:1px solid #7fbbe3;background:#e7f2fa;font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) 
format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/_static/device-CPU_CUDA-orange.svg b/_static/device-CPU_CUDA-orange.svg new file mode 100644 index 000000000..a023a1283 --- /dev/null +++ b/_static/device-CPU_CUDA-orange.svg @@ -0,0 +1 @@ +device: CPU | CUDAdeviceCPU | CUDA diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 000000000..c3db08d1c --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,264 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. + */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + parent.insertBefore( + span, + parent.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.highlightSearchWords(); + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 
0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely be bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords: () => { + const highlight = + new URLSearchParams(window.location.search).get("highlight") || ""; + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '<p class="highlight-link">' + + '<a href="javascript:Documentation.hideSearchWords()">' + + Documentation.gettext("Hide Search Matches") + + "</a></p>" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + const url = new URL(window.location); + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + const blacklistedElements = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", + ]); + document.addEventListener("keydown", (event) => {
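+ // keyboard shortcuts handled below: ArrowLeft/ArrowRight follow the rel="prev"/rel="next" links, Escape hides search highlights, and "/" focuses the search box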
+ if (blacklistedElements.has(document.activeElement.tagName)) return; // bail for input elements + if (event.altKey || event.ctrlKey || event.metaKey) return; // bail with special keys + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + case "Escape": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.hideSearchWords(); + event.preventDefault(); + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 000000000..cf359c0aa --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '0.1', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 000000000..a858a410e Binary files /dev/null and b/_static/file.png differ diff --git a/_static/jquery-3.6.0.js b/_static/jquery-3.6.0.js new file mode 100644 index 000000000..fc6c299b7 --- /dev/null +++ b/_static/jquery-3.6.0.js @@ -0,0 +1,10881 @@ +/*! + * jQuery JavaScript Library v3.6.0 + * https://jquery.com/ + * + * Includes Sizzle.js + * https://sizzlejs.com/ + * + * Copyright OpenJS Foundation and other contributors + * Released under the MIT license + * https://jquery.org/license + * + * Date: 2021-03-02T17:08Z + */ +( function( global, factory ) { + + "use strict"; + + if ( typeof module === "object" && typeof module.exports === "object" ) { + + // For CommonJS and CommonJS-like environments where a proper `window` + // is present, execute the factory and get jQuery. + // For environments that do not have a `window` with a `document` + // (such as Node.js), expose a factory as module.exports. + // This accentuates the need for the creation of a real `window`. + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info. + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +} )( typeof window !== "undefined" ? 
window : this, function( window, noGlobal ) { + +// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 +// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode +// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common +// enough that all such attempts are guarded in a try block. +"use strict"; + +var arr = []; + +var getProto = Object.getPrototypeOf; + +var slice = arr.slice; + +var flat = arr.flat ? function( array ) { + return arr.flat.call( array ); +} : function( array ) { + return arr.concat.apply( [], array ); +}; + + +var push = arr.push; + +var indexOf = arr.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var fnToString = hasOwn.toString; + +var ObjectFunctionString = fnToString.call( Object ); + +var support = {}; + +var isFunction = function isFunction( obj ) { + + // Support: Chrome <=57, Firefox <=52 + // In some browsers, typeof returns "function" for HTML elements + // (i.e., `typeof document.createElement( "object" ) === "function"`). + // We don't want to classify *any* DOM node as a function. + // Support: QtWeb <=3.8.5, WebKit <=534.34, wkhtmltopdf tool <=0.12.5 + // Plus for old WebKit, typeof returns "function" for HTML collections + // (e.g., `typeof document.getElementsByTagName("div") === "function"`). (gh-4756) + return typeof obj === "function" && typeof obj.nodeType !== "number" && + typeof obj.item !== "function"; + }; + + +var isWindow = function isWindow( obj ) { + return obj != null && obj === obj.window; + }; + + +var document = window.document; + + + + var preservedScriptAttributes = { + type: true, + src: true, + nonce: true, + noModule: true + }; + + function DOMEval( code, node, doc ) { + doc = doc || document; + + var i, val, + script = doc.createElement( "script" ); + + script.text = code; + if ( node ) { + for ( i in preservedScriptAttributes ) { + + // Support: Firefox 64+, Edge 18+ + // Some browsers don't support the "nonce" property on scripts. + // On the other hand, just using `getAttribute` is not enough as + // the `nonce` attribute is reset to an empty string whenever it + // becomes browsing-context connected. + // See https://github.com/whatwg/html/issues/2369 + // See https://html.spec.whatwg.org/#nonce-attributes + // The `node.getAttribute` check was added for the sake of + // `jQuery.globalEval` so that it can fake a nonce-containing node + // via an object. + val = node[ i ] || node.getAttribute && node.getAttribute( i ); + if ( val ) { + script.setAttribute( i, val ); + } + } + } + doc.head.appendChild( script ).parentNode.removeChild( script ); + } + + +function toType( obj ) { + if ( obj == null ) { + return obj + ""; + } + + // Support: Android <=2.3 only (functionish RegExp) + return typeof obj === "object" || typeof obj === "function" ? 
+ class2type[ toString.call( obj ) ] || "object" : + typeof obj; +} +/* global Symbol */ +// Defining this global in .eslintrc.json would create a danger of using the global +// unguarded in another place, it seems safer to define global only for this module + + + +var + version = "3.6.0", + + // Define a local copy of jQuery + jQuery = function( selector, context ) { + + // The jQuery object is actually just the init constructor 'enhanced' + // Need init if jQuery is called (just allow error to be thrown if not included) + return new jQuery.fn.init( selector, context ); + }; + +jQuery.fn = jQuery.prototype = { + + // The current version of jQuery being used + jquery: version, + + constructor: jQuery, + + // The default length of a jQuery object is 0 + length: 0, + + toArray: function() { + return slice.call( this ); + }, + + // Get the Nth element in the matched element set OR + // Get the whole matched element set as a clean array + get: function( num ) { + + // Return all the elements in a clean array + if ( num == null ) { + return slice.call( this ); + } + + // Return just the one element from the set + return num < 0 ? this[ num + this.length ] : this[ num ]; + }, + + // Take an array of elements and push it onto the stack + // (returning the new matched element set) + pushStack: function( elems ) { + + // Build a new jQuery matched element set + var ret = jQuery.merge( this.constructor(), elems ); + + // Add the old object onto the stack (as a reference) + ret.prevObject = this; + + // Return the newly-formed element set + return ret; + }, + + // Execute a callback for every element in the matched set. + each: function( callback ) { + return jQuery.each( this, callback ); + }, + + map: function( callback ) { + return this.pushStack( jQuery.map( this, function( elem, i ) { + return callback.call( elem, i, elem ); + } ) ); + }, + + slice: function() { + return this.pushStack( slice.apply( this, arguments ) ); + }, + + first: function() { + return this.eq( 0 ); + }, + + last: function() { + return this.eq( -1 ); + }, + + even: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return ( i + 1 ) % 2; + } ) ); + }, + + odd: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return i % 2; + } ) ); + }, + + eq: function( i ) { + var len = this.length, + j = +i + ( i < 0 ? len : 0 ); + return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); + }, + + end: function() { + return this.prevObject || this.constructor(); + }, + + // For internal use only. + // Behaves like an Array's method, not like a jQuery method. 
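+ // Exposing these native Array methods keeps a jQuery object usable as a writable array-like by internal helpers.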
+ push: push, + sort: arr.sort, + splice: arr.splice +}; + +jQuery.extend = jQuery.fn.extend = function() { + var options, name, src, copy, copyIsArray, clone, + target = arguments[ 0 ] || {}, + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if ( typeof target === "boolean" ) { + deep = target; + + // Skip the boolean and the target + target = arguments[ i ] || {}; + i++; + } + + // Handle case when target is a string or something (possible in deep copy) + if ( typeof target !== "object" && !isFunction( target ) ) { + target = {}; + } + + // Extend jQuery itself if only one argument is passed + if ( i === length ) { + target = this; + i--; + } + + for ( ; i < length; i++ ) { + + // Only deal with non-null/undefined values + if ( ( options = arguments[ i ] ) != null ) { + + // Extend the base object + for ( name in options ) { + copy = options[ name ]; + + // Prevent Object.prototype pollution + // Prevent never-ending loop + if ( name === "__proto__" || target === copy ) { + continue; + } + + // Recurse if we're merging plain objects or arrays + if ( deep && copy && ( jQuery.isPlainObject( copy ) || + ( copyIsArray = Array.isArray( copy ) ) ) ) { + src = target[ name ]; + + // Ensure proper type for the source value + if ( copyIsArray && !Array.isArray( src ) ) { + clone = []; + } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) { + clone = {}; + } else { + clone = src; + } + copyIsArray = false; + + // Never move original objects, clone them + target[ name ] = jQuery.extend( deep, clone, copy ); + + // Don't bring in undefined values + } else if ( copy !== undefined ) { + target[ name ] = copy; + } + } + } + } + + // Return the modified object + return target; +}; + +jQuery.extend( { + + // Unique for each copy of jQuery on the page + expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), + + // Assume jQuery is ready without the ready module + isReady: true, + + error: function( msg ) { + throw new Error( msg ); + }, + + noop: function() {}, + + isPlainObject: function( obj ) { + var proto, Ctor; + + // Detect obvious negatives + // Use toString instead of jQuery.type to catch host objects + if ( !obj || toString.call( obj ) !== "[object Object]" ) { + return false; + } + + proto = getProto( obj ); + + // Objects with no prototype (e.g., `Object.create( null )`) are plain + if ( !proto ) { + return true; + } + + // Objects with prototype are plain iff they were constructed by a global Object function + Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; + return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; + }, + + isEmptyObject: function( obj ) { + var name; + + for ( name in obj ) { + return false; + } + return true; + }, + + // Evaluates a script in a provided context; falls back to the global one + // if not specified. 
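+ // The optional doc argument allows evaluation inside another document (e.g. an iframe), and options.nonce is forwarded so the injected script element passes CSP checks (see DOMEval above).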
+ globalEval: function( code, options, doc ) { + DOMEval( code, { nonce: options && options.nonce }, doc ); + }, + + each: function( obj, callback ) { + var length, i = 0; + + if ( isArrayLike( obj ) ) { + length = obj.length; + for ( ; i < length; i++ ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } else { + for ( i in obj ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } + + return obj; + }, + + // results is for internal usage only + makeArray: function( arr, results ) { + var ret = results || []; + + if ( arr != null ) { + if ( isArrayLike( Object( arr ) ) ) { + jQuery.merge( ret, + typeof arr === "string" ? + [ arr ] : arr + ); + } else { + push.call( ret, arr ); + } + } + + return ret; + }, + + inArray: function( elem, arr, i ) { + return arr == null ? -1 : indexOf.call( arr, elem, i ); + }, + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + merge: function( first, second ) { + var len = +second.length, + j = 0, + i = first.length; + + for ( ; j < len; j++ ) { + first[ i++ ] = second[ j ]; + } + + first.length = i; + + return first; + }, + + grep: function( elems, callback, invert ) { + var callbackInverse, + matches = [], + i = 0, + length = elems.length, + callbackExpect = !invert; + + // Go through the array, only saving the items + // that pass the validator function + for ( ; i < length; i++ ) { + callbackInverse = !callback( elems[ i ], i ); + if ( callbackInverse !== callbackExpect ) { + matches.push( elems[ i ] ); + } + } + + return matches; + }, + + // arg is for internal usage only + map: function( elems, callback, arg ) { + var length, value, + i = 0, + ret = []; + + // Go through the array, translating each of the items to their new values + if ( isArrayLike( elems ) ) { + length = elems.length; + for ( ; i < length; i++ ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + + // Go through every key on the object, + } else { + for ( i in elems ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + } + + // Flatten any nested arrays + return flat( ret ); + }, + + // A global GUID counter for objects + guid: 1, + + // jQuery.support is not used in Core but other projects attach their + // properties to it so it needs to exist. + support: support +} ); + +if ( typeof Symbol === "function" ) { + jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; +} + +// Populate the class2type map +jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), + function( _i, name ) { + class2type[ "[object " + name + "]" ] = name.toLowerCase(); + } ); + +function isArrayLike( obj ) { + + // Support: real iOS 8.2 only (not reproducible in simulator) + // `in` check used to prevent JIT error (gh-2145) + // hasOwn isn't used here due to false negatives + // regarding Nodelist length in IE + var length = !!obj && "length" in obj && obj.length, + type = toType( obj ); + + if ( isFunction( obj ) || isWindow( obj ) ) { + return false; + } + + return type === "array" || length === 0 || + typeof length === "number" && length > 0 && ( length - 1 ) in obj; +} +var Sizzle = +/*! 
+ * Sizzle CSS Selector Engine v2.3.6 + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://js.foundation/ + * + * Date: 2021-02-16 + */ +( function( window ) { +var i, + support, + Expr, + getText, + isXML, + tokenize, + compile, + select, + outermostContext, + sortInput, + hasDuplicate, + + // Local document vars + setDocument, + document, + docElem, + documentIsHTML, + rbuggyQSA, + rbuggyMatches, + matches, + contains, + + // Instance-specific data + expando = "sizzle" + 1 * new Date(), + preferredDoc = window.document, + dirruns = 0, + done = 0, + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + nonnativeSelectorCache = createCache(), + sortOrder = function( a, b ) { + if ( a === b ) { + hasDuplicate = true; + } + return 0; + }, + + // Instance methods + hasOwn = ( {} ).hasOwnProperty, + arr = [], + pop = arr.pop, + pushNative = arr.push, + push = arr.push, + slice = arr.slice, + + // Use a stripped-down indexOf as it's faster than native + // https://jsperf.com/thor-indexof-vs-for/5 + indexOf = function( list, elem ) { + var i = 0, + len = list.length; + for ( ; i < len; i++ ) { + if ( list[ i ] === elem ) { + return i; + } + } + return -1; + }, + + booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|" + + "ismap|loop|multiple|open|readonly|required|scoped", + + // Regular expressions + + // http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + + // https://www.w3.org/TR/css-syntax-3/#ident-token-diagram + identifier = "(?:\\\\[\\da-fA-F]{1,6}" + whitespace + + "?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+", + + // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors + attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + + + // Operator (capture 2) + "*([*^$|!~]?=)" + whitespace + + + // "Attribute values must be CSS identifiers [capture 5] + // or strings [capture 3 or capture 4]" + "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + + whitespace + "*\\]", + + pseudos = ":(" + identifier + ")(?:\\((" + + + // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments: + // 1. quoted (capture 3; capture 4 or capture 5) + "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + + + // 2. simple (capture 6) + "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + + + // 3. 
anything else (capture 2) + ".*" + + ")\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rwhitespace = new RegExp( whitespace + "+", "g" ), + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + + "*" ), + rdescend = new RegExp( whitespace + "|>" ), + + rpseudo = new RegExp( pseudos ), + ridentifier = new RegExp( "^" + identifier + "$" ), + + matchExpr = { + "ID": new RegExp( "^#(" + identifier + ")" ), + "CLASS": new RegExp( "^\\.(" + identifier + ")" ), + "TAG": new RegExp( "^(" + identifier + "|[*])" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + + whitespace + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + + whitespace + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), + + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + whitespace + + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + rhtml = /HTML$/i, + rinputs = /^(?:input|select|textarea|button)$/i, + rheader = /^h\d$/i, + + rnative = /^[^{]+\{\s*\[native \w/, + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, + + rsibling = /[+~]/, + + // CSS escapes + // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters + runescape = new RegExp( "\\\\[\\da-fA-F]{1,6}" + whitespace + "?|\\\\([^\\r\\n\\f])", "g" ), + funescape = function( escape, nonHex ) { + var high = "0x" + escape.slice( 1 ) - 0x10000; + + return nonHex ? + + // Strip the backslash prefix from a non-hex escape sequence + nonHex : + + // Replace a hexadecimal escape sequence with the encoded Unicode code point + // Support: IE <=11+ + // For values outside the Basic Multilingual Plane (BMP), manually construct a + // surrogate pair + high < 0 ? 
+ String.fromCharCode( high + 0x10000 ) : + String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); + }, + + // CSS string/identifier serialization + // https://drafts.csswg.org/cssom/#common-serializing-idioms + rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, + fcssescape = function( ch, asCodePoint ) { + if ( asCodePoint ) { + + // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER + if ( ch === "\0" ) { + return "\uFFFD"; + } + + // Control characters and (dependent upon position) numbers get escaped as code points + return ch.slice( 0, -1 ) + "\\" + + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; + } + + // Other potentially-special ASCII characters get backslash-escaped + return "\\" + ch; + }, + + // Used for iframes + // See setDocument() + // Removing the function wrapper causes a "Permission Denied" + // error in IE + unloadHandler = function() { + setDocument(); + }, + + inDisabledFieldset = addCombinator( + function( elem ) { + return elem.disabled === true && elem.nodeName.toLowerCase() === "fieldset"; + }, + { dir: "parentNode", next: "legend" } + ); + +// Optimize for push.apply( _, NodeList ) +try { + push.apply( + ( arr = slice.call( preferredDoc.childNodes ) ), + preferredDoc.childNodes + ); + + // Support: Android<4.0 + // Detect silently failing push.apply + // eslint-disable-next-line no-unused-expressions + arr[ preferredDoc.childNodes.length ].nodeType; +} catch ( e ) { + push = { apply: arr.length ? + + // Leverage slice if possible + function( target, els ) { + pushNative.apply( target, slice.call( els ) ); + } : + + // Support: IE<9 + // Otherwise append directly + function( target, els ) { + var j = target.length, + i = 0; + + // Can't trust NodeList.length + while ( ( target[ j++ ] = els[ i++ ] ) ) {} + target.length = j - 1; + } + }; +} + +function Sizzle( selector, context, results, seed ) { + var m, i, elem, nid, match, groups, newSelector, + newContext = context && context.ownerDocument, + + // nodeType defaults to 9, since context defaults to document + nodeType = context ? 
context.nodeType : 9; + + results = results || []; + + // Return early from calls with invalid selector or context + if ( typeof selector !== "string" || !selector || + nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { + + return results; + } + + // Try to shortcut find operations (as opposed to filters) in HTML documents + if ( !seed ) { + setDocument( context ); + context = context || document; + + if ( documentIsHTML ) { + + // If the selector is sufficiently simple, try using a "get*By*" DOM method + // (excepting DocumentFragment context, where the methods don't exist) + if ( nodeType !== 11 && ( match = rquickExpr.exec( selector ) ) ) { + + // ID selector + if ( ( m = match[ 1 ] ) ) { + + // Document context + if ( nodeType === 9 ) { + if ( ( elem = context.getElementById( m ) ) ) { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + + // Element context + } else { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( newContext && ( elem = newContext.getElementById( m ) ) && + contains( context, elem ) && + elem.id === m ) { + + results.push( elem ); + return results; + } + } + + // Type selector + } else if ( match[ 2 ] ) { + push.apply( results, context.getElementsByTagName( selector ) ); + return results; + + // Class selector + } else if ( ( m = match[ 3 ] ) && support.getElementsByClassName && + context.getElementsByClassName ) { + + push.apply( results, context.getElementsByClassName( m ) ); + return results; + } + } + + // Take advantage of querySelectorAll + if ( support.qsa && + !nonnativeSelectorCache[ selector + " " ] && + ( !rbuggyQSA || !rbuggyQSA.test( selector ) ) && + + // Support: IE 8 only + // Exclude object elements + ( nodeType !== 1 || context.nodeName.toLowerCase() !== "object" ) ) { + + newSelector = selector; + newContext = context; + + // qSA considers elements outside a scoping root when evaluating child or + // descendant combinators, which is not what we want. + // In such cases, we work around the behavior by prefixing every selector in the + // list with an ID selector referencing the scope context. + // The technique has to be used as well when a leading combinator is used + // as such selectors are not recognized by querySelectorAll. + // Thanks to Andrew Dupont for this technique. + if ( nodeType === 1 && + ( rdescend.test( selector ) || rcombinators.test( selector ) ) ) { + + // Expand context for sibling selectors + newContext = rsibling.test( selector ) && testContext( context.parentNode ) || + context; + + // We can use :scope instead of the ID hack if the browser + // supports it & if we're not changing the context. + if ( newContext !== context || !support.scope ) { + + // Capture the context ID, setting it first if necessary + if ( ( nid = context.getAttribute( "id" ) ) ) { + nid = nid.replace( rcssescape, fcssescape ); + } else { + context.setAttribute( "id", ( nid = expando ) ); + } + } + + // Prefix every selector in the list + groups = tokenize( selector ); + i = groups.length; + while ( i-- ) { + groups[ i ] = ( nid ? 
"#" + nid : ":scope" ) + " " + + toSelector( groups[ i ] ); + } + newSelector = groups.join( "," ); + } + + try { + push.apply( results, + newContext.querySelectorAll( newSelector ) + ); + return results; + } catch ( qsaError ) { + nonnativeSelectorCache( selector, true ); + } finally { + if ( nid === expando ) { + context.removeAttribute( "id" ); + } + } + } + } + } + + // All others + return select( selector.replace( rtrim, "$1" ), context, results, seed ); +} + +/** + * Create key-value caches of limited size + * @returns {function(string, object)} Returns the Object data after storing it on itself with + * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) + * deleting the oldest entry + */ +function createCache() { + var keys = []; + + function cache( key, value ) { + + // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) + if ( keys.push( key + " " ) > Expr.cacheLength ) { + + // Only keep the most recent entries + delete cache[ keys.shift() ]; + } + return ( cache[ key + " " ] = value ); + } + return cache; +} + +/** + * Mark a function for special use by Sizzle + * @param {Function} fn The function to mark + */ +function markFunction( fn ) { + fn[ expando ] = true; + return fn; +} + +/** + * Support testing using an element + * @param {Function} fn Passed the created element and returns a boolean result + */ +function assert( fn ) { + var el = document.createElement( "fieldset" ); + + try { + return !!fn( el ); + } catch ( e ) { + return false; + } finally { + + // Remove from its parent by default + if ( el.parentNode ) { + el.parentNode.removeChild( el ); + } + + // release memory in IE + el = null; + } +} + +/** + * Adds the same handler for all of the specified attrs + * @param {String} attrs Pipe-separated list of attributes + * @param {Function} handler The method that will be applied + */ +function addHandle( attrs, handler ) { + var arr = attrs.split( "|" ), + i = arr.length; + + while ( i-- ) { + Expr.attrHandle[ arr[ i ] ] = handler; + } +} + +/** + * Checks document order of two siblings + * @param {Element} a + * @param {Element} b + * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b + */ +function siblingCheck( a, b ) { + var cur = b && a, + diff = cur && a.nodeType === 1 && b.nodeType === 1 && + a.sourceIndex - b.sourceIndex; + + // Use IE sourceIndex if available on both nodes + if ( diff ) { + return diff; + } + + // Check if b follows a + if ( cur ) { + while ( ( cur = cur.nextSibling ) ) { + if ( cur === b ) { + return -1; + } + } + } + + return a ? 
1 : -1; +} + +/** + * Returns a function to use in pseudos for input types + * @param {String} type + */ +function createInputPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for buttons + * @param {String} type + */ +function createButtonPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return ( name === "input" || name === "button" ) && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for :enabled/:disabled + * @param {Boolean} disabled true for :disabled; false for :enabled + */ +function createDisabledPseudo( disabled ) { + + // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable + return function( elem ) { + + // Only certain elements can match :enabled or :disabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled + if ( "form" in elem ) { + + // Check for inherited disabledness on relevant non-disabled elements: + // * listed form-associated elements in a disabled fieldset + // https://html.spec.whatwg.org/multipage/forms.html#category-listed + // https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled + // * option elements in a disabled optgroup + // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled + // All such elements have a "form" property. + if ( elem.parentNode && elem.disabled === false ) { + + // Option elements defer to a parent optgroup if present + if ( "label" in elem ) { + if ( "label" in elem.parentNode ) { + return elem.parentNode.disabled === disabled; + } else { + return elem.disabled === disabled; + } + } + + // Support: IE 6 - 11 + // Use the isDisabled shortcut property to check for disabled fieldset ancestors + return elem.isDisabled === disabled || + + // Where there is no isDisabled, check manually + /* jshint -W018 */ + elem.isDisabled !== !disabled && + inDisabledFieldset( elem ) === disabled; + } + + return elem.disabled === disabled; + + // Try to winnow out elements that can't be disabled before trusting the disabled property. + // Some victims get caught in our net (label, legend, menu, track), but it shouldn't + // even exist on them, let alone have a boolean value. 
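+ // Elements reaching this branch have a "label" property but no "form" property (e.g. optgroup), so their own disabled value can be used directly.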
+ } else if ( "label" in elem ) { + return elem.disabled === disabled; + } + + // Remaining elements are neither :enabled nor :disabled + return false; + }; +} + +/** + * Returns a function to use in pseudos for positionals + * @param {Function} fn + */ +function createPositionalPseudo( fn ) { + return markFunction( function( argument ) { + argument = +argument; + return markFunction( function( seed, matches ) { + var j, + matchIndexes = fn( [], seed.length, argument ), + i = matchIndexes.length; + + // Match elements found at the specified indexes + while ( i-- ) { + if ( seed[ ( j = matchIndexes[ i ] ) ] ) { + seed[ j ] = !( matches[ j ] = seed[ j ] ); + } + } + } ); + } ); +} + +/** + * Checks a node for validity as a Sizzle context + * @param {Element|Object=} context + * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value + */ +function testContext( context ) { + return context && typeof context.getElementsByTagName !== "undefined" && context; +} + +// Expose support vars for convenience +support = Sizzle.support = {}; + +/** + * Detects XML nodes + * @param {Element|Object} elem An element or a document + * @returns {Boolean} True iff elem is a non-HTML XML node + */ +isXML = Sizzle.isXML = function( elem ) { + var namespace = elem && elem.namespaceURI, + docElem = elem && ( elem.ownerDocument || elem ).documentElement; + + // Support: IE <=8 + // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes + // https://bugs.jquery.com/ticket/4833 + return !rhtml.test( namespace || docElem && docElem.nodeName || "HTML" ); +}; + +/** + * Sets document-related variables once based on the current document + * @param {Element|Object} [doc] An element or document object to use to set the document + * @returns {Object} Returns the current document + */ +setDocument = Sizzle.setDocument = function( node ) { + var hasCompare, subWindow, + doc = node ? node.ownerDocument || node : preferredDoc; + + // Return early if doc is invalid or already selected + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( doc == document || doc.nodeType !== 9 || !doc.documentElement ) { + return document; + } + + // Update global variables + document = doc; + docElem = document.documentElement; + documentIsHTML = !isXML( document ); + + // Support: IE 9 - 11+, Edge 12 - 18+ + // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( preferredDoc != document && + ( subWindow = document.defaultView ) && subWindow.top !== subWindow ) { + + // Support: IE 11, Edge + if ( subWindow.addEventListener ) { + subWindow.addEventListener( "unload", unloadHandler, false ); + + // Support: IE 9 - 10 only + } else if ( subWindow.attachEvent ) { + subWindow.attachEvent( "onunload", unloadHandler ); + } + } + + // Support: IE 8 - 11+, Edge 12 - 18+, Chrome <=16 - 25 only, Firefox <=3.6 - 31 only, + // Safari 4 - 5 only, Opera <=11.6 - 12.x only + // IE/Edge & older browsers don't support the :scope pseudo-class. + // Support: Safari 6.0 only + // Safari 6.0 supports :scope but it's an alias of :root there. 
+ support.scope = assert( function( el ) { + docElem.appendChild( el ).appendChild( document.createElement( "div" ) ); + return typeof el.querySelectorAll !== "undefined" && + !el.querySelectorAll( ":scope fieldset div" ).length; + } ); + + /* Attributes + ---------------------------------------------------------------------- */ + + // Support: IE<8 + // Verify that getAttribute really returns attributes and not properties + // (excepting IE8 booleans) + support.attributes = assert( function( el ) { + el.className = "i"; + return !el.getAttribute( "className" ); + } ); + + /* getElement(s)By* + ---------------------------------------------------------------------- */ + + // Check if getElementsByTagName("*") returns only elements + support.getElementsByTagName = assert( function( el ) { + el.appendChild( document.createComment( "" ) ); + return !el.getElementsByTagName( "*" ).length; + } ); + + // Support: IE<9 + support.getElementsByClassName = rnative.test( document.getElementsByClassName ); + + // Support: IE<10 + // Check if getElementById returns elements by name + // The broken getElementById methods don't pick up programmatically-set names, + // so use a roundabout getElementsByName test + support.getById = assert( function( el ) { + docElem.appendChild( el ).id = expando; + return !document.getElementsByName || !document.getElementsByName( expando ).length; + } ); + + // ID filter and find + if ( support.getById ) { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + return elem.getAttribute( "id" ) === attrId; + }; + }; + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var elem = context.getElementById( id ); + return elem ? [ elem ] : []; + } + }; + } else { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== "undefined" && + elem.getAttributeNode( "id" ); + return node && node.value === attrId; + }; + }; + + // Support: IE 6 - 7 only + // getElementById is not reliable as a find shortcut + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var node, i, elems, + elem = context.getElementById( id ); + + if ( elem ) { + + // Verify the id attribute + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + + // Fall back on getElementsByName + elems = context.getElementsByName( id ); + i = 0; + while ( ( elem = elems[ i++ ] ) ) { + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + } + } + + return []; + } + }; + } + + // Tag + Expr.find[ "TAG" ] = support.getElementsByTagName ? 
+ function( tag, context ) { + if ( typeof context.getElementsByTagName !== "undefined" ) { + return context.getElementsByTagName( tag ); + + // DocumentFragment nodes don't have gEBTN + } else if ( support.qsa ) { + return context.querySelectorAll( tag ); + } + } : + + function( tag, context ) { + var elem, + tmp = [], + i = 0, + + // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too + results = context.getElementsByTagName( tag ); + + // Filter out possible comments + if ( tag === "*" ) { + while ( ( elem = results[ i++ ] ) ) { + if ( elem.nodeType === 1 ) { + tmp.push( elem ); + } + } + + return tmp; + } + return results; + }; + + // Class + Expr.find[ "CLASS" ] = support.getElementsByClassName && function( className, context ) { + if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) { + return context.getElementsByClassName( className ); + } + }; + + /* QSA/matchesSelector + ---------------------------------------------------------------------- */ + + // QSA and matchesSelector support + + // matchesSelector(:active) reports false when true (IE9/Opera 11.5) + rbuggyMatches = []; + + // qSa(:focus) reports false when true (Chrome 21) + // We allow this because of a bug in IE8/9 that throws an error + // whenever `document.activeElement` is accessed on an iframe + // So, we allow :focus to pass through QSA all the time to avoid the IE error + // See https://bugs.jquery.com/ticket/13378 + rbuggyQSA = []; + + if ( ( support.qsa = rnative.test( document.querySelectorAll ) ) ) { + + // Build QSA regex + // Regex strategy adopted from Diego Perini + assert( function( el ) { + + var input; + + // Select is set to empty string on purpose + // This is to test IE's treatment of not explicitly + // setting a boolean content attribute, + // since its presence should be enough + // https://bugs.jquery.com/ticket/12359 + docElem.appendChild( el ).innerHTML = "<a id='" + expando + "'></a>" + + "<select id='" + expando + "-\r\\' msallowcapture=''>" + + "<option selected=''></option></select>"; + + // Support: IE8, Opera 11-12.16 + // Nothing should be selected when empty strings follow ^= or $= or *= + // The test attribute must be unknown in Opera but "safe" for WinRT + // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section + if ( el.querySelectorAll( "[msallowcapture^='']" ).length ) { + rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); + } + + // Support: IE8 + // Boolean attributes and "value" are not treated correctly + if ( !el.querySelectorAll( "[selected]" ).length ) { + rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); + } + + // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+ + if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) { + rbuggyQSA.push( "~=" ); + } + + // Support: IE 11+, Edge 15 - 18+ + // IE 11/Edge don't find elements on a `[name='']` query in some cases. + // Adding a temporary attribute to the document before the selection works + // around the issue. + // Interestingly, IE 10 & older don't seem to have the issue. 
+ input = document.createElement( "input" ); + input.setAttribute( "name", "" ); + el.appendChild( input ); + if ( !el.querySelectorAll( "[name='']" ).length ) { + rbuggyQSA.push( "\\[" + whitespace + "*name" + whitespace + "*=" + + whitespace + "*(?:''|\"\")" ); + } + + // Webkit/Opera - :checked should return selected option elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + // IE8 throws error here and will not see later tests + if ( !el.querySelectorAll( ":checked" ).length ) { + rbuggyQSA.push( ":checked" ); + } + + // Support: Safari 8+, iOS 8+ + // https://bugs.webkit.org/show_bug.cgi?id=136851 + // In-page `selector#id sibling-combinator selector` fails + if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) { + rbuggyQSA.push( ".#.+[+~]" ); + } + + // Support: Firefox <=3.6 - 5 only + // Old Firefox doesn't throw on a badly-escaped identifier. + el.querySelectorAll( "\\\f" ); + rbuggyQSA.push( "[\\r\\n\\f]" ); + } ); + + assert( function( el ) { + el.innerHTML = "<a href='' disabled='disabled'></a>" + + "<select disabled='disabled'><option/></select>"; + + // Support: Windows 8 Native Apps + // The type and name attributes are restricted during .innerHTML assignment + var input = document.createElement( "input" ); + input.setAttribute( "type", "hidden" ); + el.appendChild( input ).setAttribute( "name", "D" ); + + // Support: IE8 + // Enforce case-sensitivity of name attribute + if ( el.querySelectorAll( "[name=d]" ).length ) { + rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" ); + } + + // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) + // IE8 throws error here and will not see later tests + if ( el.querySelectorAll( ":enabled" ).length !== 2 ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Support: IE9-11+ + // IE's :disabled selector does not pick up the children of disabled fieldsets + docElem.appendChild( el ).disabled = true; + if ( el.querySelectorAll( ":disabled" ).length !== 2 ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Support: Opera 10 - 11 only + // Opera 10-11 does not throw on post-comma invalid pseudos + el.querySelectorAll( "*,:x" ); + rbuggyQSA.push( ",.*:" ); + } ); + } + + if ( ( support.matchesSelector = rnative.test( ( matches = docElem.matches || + docElem.webkitMatchesSelector || + docElem.mozMatchesSelector || + docElem.oMatchesSelector || + docElem.msMatchesSelector ) ) ) ) { + + assert( function( el ) { + + // Check to see if it's possible to do matchesSelector + // on a disconnected node (IE 9) + support.disconnectedMatch = matches.call( el, "*" ); + + // This should fail with an exception + // Gecko does not error, returns false instead + matches.call( el, "[s!='']:x" ); + rbuggyMatches.push( "!=", pseudos ); + } ); + } + + rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join( "|" ) ); + rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join( "|" ) ); + + /* Contains + ---------------------------------------------------------------------- */ + hasCompare = rnative.test( docElem.compareDocumentPosition ); + + // Element contains another + // Purposefully self-exclusive + // As in, an element does not contain itself + contains = hasCompare || rnative.test( docElem.contains ) ? + function( a, b ) { + var adown = a.nodeType === 9 ? a.documentElement : a, + bup = b && b.parentNode; + return a === bup || !!( bup && bup.nodeType === 1 && ( + adown.contains ? 
+ adown.contains( bup ) : + a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 + ) ); + } : + function( a, b ) { + if ( b ) { + while ( ( b = b.parentNode ) ) { + if ( b === a ) { + return true; + } + } + } + return false; + }; + + /* Sorting + ---------------------------------------------------------------------- */ + + // Document order sorting + sortOrder = hasCompare ? + function( a, b ) { + + // Flag for duplicate removal + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + // Sort on method existence if only one input has compareDocumentPosition + var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; + if ( compare ) { + return compare; + } + + // Calculate position if both inputs belong to the same document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + compare = ( a.ownerDocument || a ) == ( b.ownerDocument || b ) ? + a.compareDocumentPosition( b ) : + + // Otherwise we know they are disconnected + 1; + + // Disconnected nodes + if ( compare & 1 || + ( !support.sortDetached && b.compareDocumentPosition( a ) === compare ) ) { + + // Choose the first element that is related to our preferred document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( a == document || a.ownerDocument == preferredDoc && + contains( preferredDoc, a ) ) { + return -1; + } + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( b == document || b.ownerDocument == preferredDoc && + contains( preferredDoc, b ) ) { + return 1; + } + + // Maintain original order + return sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + } + + return compare & 4 ? -1 : 1; + } : + function( a, b ) { + + // Exit early if the nodes are identical + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + var cur, + i = 0, + aup = a.parentNode, + bup = b.parentNode, + ap = [ a ], + bp = [ b ]; + + // Parentless nodes are either documents or disconnected + if ( !aup || !bup ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + /* eslint-disable eqeqeq */ + return a == document ? -1 : + b == document ? 1 : + /* eslint-enable eqeqeq */ + aup ? -1 : + bup ? 1 : + sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + + // If the nodes are siblings, we can do a quick check + } else if ( aup === bup ) { + return siblingCheck( a, b ); + } + + // Otherwise we need full lists of their ancestors for comparison + cur = a; + while ( ( cur = cur.parentNode ) ) { + ap.unshift( cur ); + } + cur = b; + while ( ( cur = cur.parentNode ) ) { + bp.unshift( cur ); + } + + // Walk down the tree looking for a discrepancy + while ( ap[ i ] === bp[ i ] ) { + i++; + } + + return i ? + + // Do a sibling check if the nodes have a common ancestor + siblingCheck( ap[ i ], bp[ i ] ) : + + // Otherwise nodes in our document sort first + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. 
+ /* eslint-disable eqeqeq */ + ap[ i ] == preferredDoc ? -1 : + bp[ i ] == preferredDoc ? 1 : + /* eslint-enable eqeqeq */ + 0; + }; + + return document; +}; + +Sizzle.matches = function( expr, elements ) { + return Sizzle( expr, null, null, elements ); +}; + +Sizzle.matchesSelector = function( elem, expr ) { + setDocument( elem ); + + if ( support.matchesSelector && documentIsHTML && + !nonnativeSelectorCache[ expr + " " ] && + ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && + ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { + + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || support.disconnectedMatch || + + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch ( e ) { + nonnativeSelectorCache( expr, true ); + } + } + + return Sizzle( expr, document, null, [ elem ] ).length > 0; +}; + +Sizzle.contains = function( context, elem ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( context.ownerDocument || context ) != document ) { + setDocument( context ); + } + return contains( context, elem ); +}; + +Sizzle.attr = function( elem, name ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( elem.ownerDocument || elem ) != document ) { + setDocument( elem ); + } + + var fn = Expr.attrHandle[ name.toLowerCase() ], + + // Don't get fooled by Object.prototype properties (jQuery #13807) + val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? + fn( elem, name, !documentIsHTML ) : + undefined; + + return val !== undefined ? + val : + support.attributes || !documentIsHTML ? + elem.getAttribute( name ) : + ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; +}; + +Sizzle.escape = function( sel ) { + return ( sel + "" ).replace( rcssescape, fcssescape ); +}; + +Sizzle.error = function( msg ) { + throw new Error( "Syntax error, unrecognized expression: " + msg ); +}; + +/** + * Document sorting and removing duplicates + * @param {ArrayLike} results + */ +Sizzle.uniqueSort = function( results ) { + var elem, + duplicates = [], + j = 0, + i = 0; + + // Unless we *know* we can detect duplicates, assume their presence + hasDuplicate = !support.detectDuplicates; + sortInput = !support.sortStable && results.slice( 0 ); + results.sort( sortOrder ); + + if ( hasDuplicate ) { + while ( ( elem = results[ i++ ] ) ) { + if ( elem === results[ i ] ) { + j = duplicates.push( i ); + } + } + while ( j-- ) { + results.splice( duplicates[ j ], 1 ); + } + } + + // Clear input after sorting to release objects + // See https://github.com/jquery/sizzle/pull/225 + sortInput = null; + + return results; +}; + +/** + * Utility function for retrieving the text value of an array of DOM nodes + * @param {Array|Element} elem + */ +getText = Sizzle.getText = function( elem ) { + var node, + ret = "", + i = 0, + nodeType = elem.nodeType; + + if ( !nodeType ) { + + // If no nodeType, this is expected to be an array + while ( ( node = elem[ i++ ] ) ) { + + // Do not traverse comment nodes + ret += getText( node ); + } + } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { + + // Use textContent for elements + // innerText usage removed for consistency of new lines (jQuery #11153) + if ( typeof elem.textContent === "string" ) { + return elem.textContent; + } else { + + // Traverse its children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + ret += getText( elem ); + } + } + } else if ( nodeType === 3 || nodeType === 4 ) { + return elem.nodeValue; + } + + // Do not include comment or processing instruction nodes + + return ret; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + attrHandle: {}, + + find: {}, + + relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[ 1 ] = match[ 1 ].replace( runescape, funescape ); + + // Move the given value to match[3] whether quoted or unquoted + match[ 3 ] = ( match[ 3 ] || match[ 4 ] || + match[ 5 ] || "" ).replace( runescape, funescape ); + + if ( match[ 2 ] === "~=" ) { + match[ 3 ] = " " + match[ 3 ] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 what (child|of-type) + 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 4 xn-component of xn+y argument ([+-]?\d*n|) + 5 sign of xn-component + 6 x of xn-component + 7 sign of y-component + 8 y of y-component + */ + match[ 1 ] = match[ 1 ].toLowerCase(); + + if ( match[ 1 ].slice( 0, 3 ) === "nth" ) { + + // nth-* requires argument + if ( !match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[ 4 ] = +( match[ 4 ] ? 
+ match[ 5 ] + ( match[ 6 ] || 1 ) : + 2 * ( match[ 3 ] === "even" || match[ 3 ] === "odd" ) ); + match[ 5 ] = +( ( match[ 7 ] + match[ 8 ] ) || match[ 3 ] === "odd" ); + + // other types prohibit arguments + } else if ( match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var excess, + unquoted = !match[ 6 ] && match[ 2 ]; + + if ( matchExpr[ "CHILD" ].test( match[ 0 ] ) ) { + return null; + } + + // Accept quoted arguments as-is + if ( match[ 3 ] ) { + match[ 2 ] = match[ 4 ] || match[ 5 ] || ""; + + // Strip excess characters from unquoted arguments + } else if ( unquoted && rpseudo.test( unquoted ) && + + // Get excess from tokenize (recursively) + ( excess = tokenize( unquoted, true ) ) && + + // advance to the next closing parenthesis + ( excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length ) ) { + + // excess is a negative index + match[ 0 ] = match[ 0 ].slice( 0, excess ); + match[ 2 ] = unquoted.slice( 0, excess ); + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + + "TAG": function( nodeNameSelector ) { + var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); + return nodeNameSelector === "*" ? + function() { + return true; + } : + function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ className + " " ]; + + return pattern || + ( pattern = new RegExp( "(^|" + whitespace + + ")" + className + "(" + whitespace + "|$)" ) ) && classCache( + className, function( elem ) { + return pattern.test( + typeof elem.className === "string" && elem.className || + typeof elem.getAttribute !== "undefined" && + elem.getAttribute( "class" ) || + "" + ); + } ); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + /* eslint-disable max-len */ + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.slice( -check.length ) === check : + operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : + operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : + false; + /* eslint-enable max-len */ + + }; + }, + + "CHILD": function( type, what, _argument, first, last ) { + var simple = type.slice( 0, 3 ) !== "nth", + forward = type.slice( -4 ) !== "last", + ofType = what === "of-type"; + + return first === 1 && last === 0 ? + + // Shortcut for :nth-*(n) + function( elem ) { + return !!elem.parentNode; + } : + + function( elem, _context, xml ) { + var cache, uniqueCache, outerCache, node, nodeIndex, start, + dir = simple !== forward ? "nextSibling" : "previousSibling", + parent = elem.parentNode, + name = ofType && elem.nodeName.toLowerCase(), + useCache = !xml && !ofType, + diff = false; + + if ( parent ) { + + // :(first|last|only)-(child|of-type) + if ( simple ) { + while ( dir ) { + node = elem; + while ( ( node = node[ dir ] ) ) { + if ( ofType ? 
+ node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) { + + return false; + } + } + + // Reverse direction for :only-* (if we haven't yet done so) + start = dir = type === "only" && !start && "nextSibling"; + } + return true; + } + + start = [ forward ? parent.firstChild : parent.lastChild ]; + + // non-xml :nth-child(...) stores cache data on `parent` + if ( forward && useCache ) { + + // Seek `elem` from a previously-cached index + + // ...in a gzip-friendly way + node = parent; + outerCache = node[ expando ] || ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex && cache[ 2 ]; + node = nodeIndex && parent.childNodes[ nodeIndex ]; + + while ( ( node = ++nodeIndex && node && node[ dir ] || + + // Fallback to seeking `elem` from the start + ( diff = nodeIndex = 0 ) || start.pop() ) ) { + + // When found, cache indexes on `parent` and break + if ( node.nodeType === 1 && ++diff && node === elem ) { + uniqueCache[ type ] = [ dirruns, nodeIndex, diff ]; + break; + } + } + + } else { + + // Use previously-cached element index if available + if ( useCache ) { + + // ...in a gzip-friendly way + node = elem; + outerCache = node[ expando ] || ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex; + } + + // xml :nth-child(...) + // or :nth-last-child(...) or :nth(-last)?-of-type(...) + if ( diff === false ) { + + // Use the same loop as above to seek `elem` from the start + while ( ( node = ++nodeIndex && node && node[ dir ] || + ( diff = nodeIndex = 0 ) || start.pop() ) ) { + + if ( ( ofType ? + node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) && + ++diff ) { + + // Cache the index of each encountered element + if ( useCache ) { + outerCache = node[ expando ] || + ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + uniqueCache[ type ] = [ dirruns, diff ]; + } + + if ( node === elem ) { + break; + } + } + } + } + } + + // Incorporate the offset, then check against cycle size + diff -= last; + return diff === first || ( diff % first === 0 && diff / first >= 0 ); + } + }; + }, + + "PSEUDO": function( pseudo, argument ) { + + // pseudo-class names are case-insensitive + // http://www.w3.org/TR/selectors/#pseudo-classes + // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters + // Remember that setFilters inherits from pseudos + var args, + fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || + Sizzle.error( "unsupported pseudo: " + pseudo ); + + // The user may use createPseudo to indicate that + // arguments are needed to create the filter function + // just as Sizzle does + if ( fn[ expando ] ) { + return fn( argument ); + } + + // But maintain support for old signatures + if ( fn.length > 1 ) { + args = [ pseudo, pseudo, "", argument ]; + return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
+ markFunction( function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf( seed, matched[ i ] ); + seed[ idx ] = !( matches[ idx ] = matched[ i ] ); + } + } ) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + + // Potentially complex pseudos + "not": markFunction( function( selector ) { + + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? + markFunction( function( seed, matches, _context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( ( elem = unmatched[ i ] ) ) { + seed[ i ] = !( matches[ i ] = elem ); + } + } + } ) : + function( elem, _context, xml ) { + input[ 0 ] = elem; + matcher( input, null, xml, results ); + + // Don't keep the element (issue #299) + input[ 0 ] = null; + return !results.pop(); + }; + } ), + + "has": markFunction( function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + } ), + + "contains": markFunction( function( text ) { + text = text.replace( runescape, funescape ); + return function( elem ) { + return ( elem.textContent || getText( elem ) ).indexOf( text ) > -1; + }; + } ), + + // "Whether an element is represented by a :lang() selector + // is based solely on the element's language value + // being equal to the identifier C, + // or beginning with the identifier C immediately followed by "-". + // The matching of C against the element's language value is performed case-insensitively. + // The identifier C does not have to be a valid language name." + // http://www.w3.org/TR/selectors/#lang-pseudo + "lang": markFunction( function( lang ) { + + // lang value must be a valid identifier + if ( !ridentifier.test( lang || "" ) ) { + Sizzle.error( "unsupported lang: " + lang ); + } + lang = lang.replace( runescape, funescape ).toLowerCase(); + return function( elem ) { + var elemLang; + do { + if ( ( elemLang = documentIsHTML ? 
+ elem.lang : + elem.getAttribute( "xml:lang" ) || elem.getAttribute( "lang" ) ) ) { + + elemLang = elemLang.toLowerCase(); + return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; + } + } while ( ( elem = elem.parentNode ) && elem.nodeType === 1 ); + return false; + }; + } ), + + // Miscellaneous + "target": function( elem ) { + var hash = window.location && window.location.hash; + return hash && hash.slice( 1 ) === elem.id; + }, + + "root": function( elem ) { + return elem === docElem; + }, + + "focus": function( elem ) { + return elem === document.activeElement && + ( !document.hasFocus || document.hasFocus() ) && + !!( elem.type || elem.href || ~elem.tabIndex ); + }, + + // Boolean properties + "enabled": createDisabledPseudo( false ), + "disabled": createDisabledPseudo( true ), + + "checked": function( elem ) { + + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return ( nodeName === "input" && !!elem.checked ) || + ( nodeName === "option" && !!elem.selected ); + }, + + "selected": function( elem ) { + + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + // eslint-disable-next-line no-unused-expressions + elem.parentNode.selectedIndex; + } + + return elem.selected === true; + }, + + // Contents + "empty": function( elem ) { + + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), + // but not by others (comment: 8; processing instruction: 7; etc.) + // nodeType < 6 works because attributes (2) do not appear as children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + if ( elem.nodeType < 6 ) { + return false; + } + } + return true; + }, + + "parent": function( elem ) { + return !Expr.pseudos[ "empty" ]( elem ); + }, + + // Element/input types + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "text": function( elem ) { + var attr; + return elem.nodeName.toLowerCase() === "input" && + elem.type === "text" && + + // Support: IE<8 + // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" + ( ( attr = elem.getAttribute( "type" ) ) == null || + attr.toLowerCase() === "text" ); + }, + + // Position-in-collection + "first": createPositionalPseudo( function() { + return [ 0 ]; + } ), + + "last": createPositionalPseudo( function( _matchIndexes, length ) { + return [ length - 1 ]; + } ), + + "eq": createPositionalPseudo( function( _matchIndexes, length, argument ) { + return [ argument < 0 ? argument + length : argument ]; + } ), + + "even": createPositionalPseudo( function( matchIndexes, length ) { + var i = 0; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "odd": createPositionalPseudo( function( matchIndexes, length ) { + var i = 1; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "lt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? + argument + length : + argument > length ? 
+ length : + argument; + for ( ; --i >= 0; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "gt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? argument + length : argument; + for ( ; ++i < length; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ) + } +}; + +Expr.pseudos[ "nth" ] = Expr.pseudos[ "eq" ]; + +// Add button/input type pseudos +for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { + Expr.pseudos[ i ] = createInputPseudo( i ); +} +for ( i in { submit: true, reset: true } ) { + Expr.pseudos[ i ] = createButtonPseudo( i ); +} + +// Easy API for creating new setFilters +function setFilters() {} +setFilters.prototype = Expr.filters = Expr.pseudos; +Expr.setFilters = new setFilters(); + +tokenize = Sizzle.tokenize = function( selector, parseOnly ) { + var matched, match, tokens, type, + soFar, groups, preFilters, + cached = tokenCache[ selector + " " ]; + + if ( cached ) { + return parseOnly ? 0 : cached.slice( 0 ); + } + + soFar = selector; + groups = []; + preFilters = Expr.preFilter; + + while ( soFar ) { + + // Comma and first run + if ( !matched || ( match = rcomma.exec( soFar ) ) ) { + if ( match ) { + + // Don't consume trailing commas as valid + soFar = soFar.slice( match[ 0 ].length ) || soFar; + } + groups.push( ( tokens = [] ) ); + } + + matched = false; + + // Combinators + if ( ( match = rcombinators.exec( soFar ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + + // Cast descendant combinators to space + type: match[ 0 ].replace( rtrim, " " ) + } ); + soFar = soFar.slice( matched.length ); + } + + // Filters + for ( type in Expr.filter ) { + if ( ( match = matchExpr[ type ].exec( soFar ) ) && ( !preFilters[ type ] || + ( match = preFilters[ type ]( match ) ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + type: type, + matches: match + } ); + soFar = soFar.slice( matched.length ); + } + } + + if ( !matched ) { + break; + } + } + + // Return the length of the invalid excess + // if we're just parsing + // Otherwise, throw an error or return tokens + return parseOnly ? + soFar.length : + soFar ? + Sizzle.error( selector ) : + + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +}; + +function toSelector( tokens ) { + var i = 0, + len = tokens.length, + selector = ""; + for ( ; i < len; i++ ) { + selector += tokens[ i ].value; + } + return selector; +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + skip = combinator.next, + key = skip || dir, + checkNonElements = base && key === "parentNode", + doneName = done++; + + return combinator.first ? 
+ + // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + return matcher( elem, context, xml ); + } + } + return false; + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + var oldCache, uniqueCache, outerCache, + newCache = [ dirruns, doneName ]; + + // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching + if ( xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + if ( matcher( elem, context, xml ) ) { + return true; + } + } + } + } else { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + outerCache = elem[ expando ] || ( elem[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ elem.uniqueID ] || + ( outerCache[ elem.uniqueID ] = {} ); + + if ( skip && skip === elem.nodeName.toLowerCase() ) { + elem = elem[ dir ] || elem; + } else if ( ( oldCache = uniqueCache[ key ] ) && + oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { + + // Assign to newCache so results back-propagate to previous elements + return ( newCache[ 2 ] = oldCache[ 2 ] ); + } else { + + // Reuse newcache so results back-propagate to previous elements + uniqueCache[ key ] = newCache; + + // A match means we're done; a fail means we have to keep checking + if ( ( newCache[ 2 ] = matcher( elem, context, xml ) ) ) { + return true; + } + } + } + } + } + return false; + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? + function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[ i ]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[ 0 ]; +} + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[ i ], results ); + } + return results; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( ( elem = unmatched[ i ] ) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction( function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( + selector || "*", + context.nodeType ? [ context ] : context, + [] + ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( ( elem = temp[ i ] ) ) { + matcherOut[ postMap[ i ] ] = !( matcherIn[ postMap[ i ] ] = elem ); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) ) { + + // Restore matcherIn since elem is not yet a final match + temp.push( ( matcherIn[ i ] = elem ) ); + } + } + postFinder( null, ( matcherOut = [] ), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) && + ( temp = postFinder ? indexOf( seed, elem ) : preMap[ i ] ) > -1 ) { + + seed[ temp ] = !( results[ temp ] = elem ); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + } ); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[ 0 ].type ], + implicitRelative = leadingRelative || Expr.relative[ " " ], + i = leadingRelative ? 1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + ( checkContext = context ).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + + // Avoid hanging onto element (issue #299) + checkContext = null; + return ret; + } ]; + + for ( ; i < len; i++ ) { + if ( ( matcher = Expr.relative[ tokens[ i ].type ] ) ) { + matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; + } else { + matcher = Expr.filter[ tokens[ i ].type ].apply( null, tokens[ i ].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[ j ].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && toSelector( + + // If the preceding token was a descendant combinator, insert an implicit any-element `*` + tokens + .slice( 0, i - 1 ) + .concat( { value: tokens[ i - 2 ].type === " " ? 
"*" : "" } ) + ).replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( ( tokens = tokens.slice( j ) ) ), + j < len && toSelector( tokens ) + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + var bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, outermost ) { + var elem, j, matcher, + matchedCount = 0, + i = "0", + unmatched = seed && [], + setMatched = [], + contextBackup = outermostContext, + + // We must always have either seed elements or outermost context + elems = seed || byElement && Expr.find[ "TAG" ]( "*", outermost ), + + // Use integer dirruns iff this is the outermost matcher + dirrunsUnique = ( dirruns += contextBackup == null ? 1 : Math.random() || 0.1 ), + len = elems.length; + + if ( outermost ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + outermostContext = context == document || context || outermost; + } + + // Add elements passing elementMatchers directly to results + // Support: IE<9, Safari + // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id + for ( ; i !== len && ( elem = elems[ i ] ) != null; i++ ) { + if ( byElement && elem ) { + j = 0; + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( !context && elem.ownerDocument != document ) { + setDocument( elem ); + xml = !documentIsHTML; + } + while ( ( matcher = elementMatchers[ j++ ] ) ) { + if ( matcher( elem, context || document, xml ) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + + // They will have gone through all possible matchers + if ( ( elem = !matcher && elem ) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // `i` is now the count of elements visited above, and adding it to `matchedCount` + // makes the latter nonnegative. + matchedCount += i; + + // Apply set filters to unmatched elements + // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` + // equals `i`), unless we didn't visit _any_ elements in the above loop because we have + // no element matchers and no seed. + // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that + // case, which will result in a "00" `matchedCount` that differs from `i` but is also + // numerically zero. 
+ if ( bySet && i !== matchedCount ) { + j = 0; + while ( ( matcher = setMatchers[ j++ ] ) ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !( unmatched[ i ] || setMatched[ i ] ) ) { + setMatched[ i ] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? + markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ selector + " " ]; + + if ( !cached ) { + + // Generate a function of recursive functions that can be used to check each element + if ( !match ) { + match = tokenize( selector ); + } + i = match.length; + while ( i-- ) { + cached = matcherFromTokens( match[ i ] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( + selector, + matcherFromGroupMatchers( elementMatchers, setMatchers ) + ); + + // Save selector and tokenization + cached.selector = selector; + } + return cached; +}; + +/** + * A low-level selection function that works with Sizzle's compiled + * selector functions + * @param {String|Function} selector A selector or a pre-compiled + * selector function built with Sizzle.compile + * @param {Element} context + * @param {Array} [results] + * @param {Array} [seed] A set of elements to match against + */ +select = Sizzle.select = function( selector, context, results, seed ) { + var i, tokens, token, type, find, + compiled = typeof selector === "function" && selector, + match = !seed && tokenize( ( selector = compiled.selector || selector ) ); + + results = results || []; + + // Try to minimize operations if there is only one selector in the list and no seed + // (the latter of which guarantees us context) + if ( match.length === 1 ) { + + // Reduce context if the leading compound selector is an ID + tokens = match[ 0 ] = match[ 0 ].slice( 0 ); + if ( tokens.length > 2 && ( token = tokens[ 0 ] ).type === "ID" && + context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[ 1 ].type ] ) { + + context = ( Expr.find[ "ID" ]( token.matches[ 0 ] + .replace( runescape, funescape ), context ) || [] )[ 0 ]; + if ( !context ) { + return results; + + // Precompiled matchers will still verify ancestry, so step up a level + } else if ( compiled ) { + context = context.parentNode; + } + + selector = selector.slice( tokens.shift().value.length ); + } + + // Fetch a seed set for right-to-left matching + i = matchExpr[ "needsContext" ].test( selector ) ? 
0 : tokens.length; + while ( i-- ) { + token = tokens[ i ]; + + // Abort if we hit a combinator + if ( Expr.relative[ ( type = token.type ) ] ) { + break; + } + if ( ( find = Expr.find[ type ] ) ) { + + // Search, expanding context for leading sibling combinators + if ( ( seed = find( + token.matches[ 0 ].replace( runescape, funescape ), + rsibling.test( tokens[ 0 ].type ) && testContext( context.parentNode ) || + context + ) ) ) { + + // If seed is empty or no tokens remain, we can return early + tokens.splice( i, 1 ); + selector = seed.length && toSelector( tokens ); + if ( !selector ) { + push.apply( results, seed ); + return results; + } + + break; + } + } + } + } + + // Compile and execute a filtering function if one is not provided + // Provide `match` to avoid retokenization if we modified the selector above + ( compiled || compile( selector, match ) )( + seed, + context, + !documentIsHTML, + results, + !context || rsibling.test( selector ) && testContext( context.parentNode ) || context + ); + return results; +}; + +// One-time assignments + +// Sort stability +support.sortStable = expando.split( "" ).sort( sortOrder ).join( "" ) === expando; + +// Support: Chrome 14-35+ +// Always assume duplicates if they aren't passed to the comparison function +support.detectDuplicates = !!hasDuplicate; + +// Initialize against the default document +setDocument(); + +// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27) +// Detached nodes confoundingly follow *each other* +support.sortDetached = assert( function( el ) { + + // Should return 1, but returns 4 (following) + return el.compareDocumentPosition( document.createElement( "fieldset" ) ) & 1; +} ); + +// Support: IE<8 +// Prevent attribute/property "interpolation" +// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx +if ( !assert( function( el ) { + el.innerHTML = "<a href='#'></a>"; + return el.firstChild.getAttribute( "href" ) === "#"; +} ) ) { + addHandle( "type|href|height|width", function( elem, name, isXML ) { + if ( !isXML ) { + return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 ); + } + } ); +} + +// Support: IE<9 +// Use defaultValue in place of getAttribute("value") +if ( !support.attributes || !assert( function( el ) { + el.innerHTML = "<input/>"; + el.firstChild.setAttribute( "value", "" ); + return el.firstChild.getAttribute( "value" ) === ""; +} ) ) { + addHandle( "value", function( elem, _name, isXML ) { + if ( !isXML && elem.nodeName.toLowerCase() === "input" ) { + return elem.defaultValue; + } + } ); +} + +// Support: IE<9 +// Use getAttributeNode to fetch booleans when getAttribute lies +if ( !assert( function( el ) { + return el.getAttribute( "disabled" ) == null; +} ) ) { + addHandle( booleans, function( elem, name, isXML ) { + var val; + if ( !isXML ) { + return elem[ name ] === true ? name.toLowerCase() : + ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; + } + } ); +} + +return Sizzle; + +} )( window ); + + + +jQuery.find = Sizzle; +jQuery.expr = Sizzle.selectors; + +// Deprecated +jQuery.expr[ ":" ] = jQuery.expr.pseudos; +jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; +jQuery.text = Sizzle.getText; +jQuery.isXMLDoc = Sizzle.isXML; +jQuery.contains = Sizzle.contains; +jQuery.escapeSelector = Sizzle.escape; + + + + +var dir = function( elem, dir, until ) { + var matched = [], + truncate = until !== undefined; + + while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { + if ( elem.nodeType === 1 ) { + if ( truncate && jQuery( elem ).is( until ) ) { + break; + } + matched.push( elem ); + } + } + return matched; +}; + + +var siblings = function( n, elem ) { + var matched = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType === 1 && n !== elem ) { + matched.push( n ); + } + } + + return matched; +}; + + +var rneedsContext = jQuery.expr.match.needsContext; + + + +function nodeName( elem, name ) { + + return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); + +} +var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); + + + +// Implement the identical functionality for filter and not +function winnow( elements, qualifier, not ) { + if ( isFunction( qualifier ) ) { + return jQuery.grep( elements, function( elem, i ) { + return !!qualifier.call( elem, i, elem ) !== not; + } ); + } + + // Single element + if ( qualifier.nodeType ) { + return jQuery.grep( elements, function( elem ) { + return ( elem === qualifier ) !== not; + } ); + } + + // Arraylike of elements (jQuery, arguments, Array) + if ( typeof qualifier !== "string" ) { + return jQuery.grep( elements, function( elem ) { + return ( indexOf.call( qualifier, elem ) > -1 ) !== not; + } ); + } + + // Filtered directly for both simple and complex selectors + return jQuery.filter( qualifier, elements, not ); +} + +jQuery.filter = function( expr, elems, not ) { + var elem = elems[ 0 ]; + + if ( not ) { + expr = ":not(" + expr + ")"; + } + + if ( elems.length === 1 && elem.nodeType === 1 ) { + return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; + } + + return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { + return elem.nodeType === 1; + } ) ); +}; + +jQuery.fn.extend( { + find: function( selector ) { + var i, ret, + len = this.length, + self = this; + + if ( typeof selector !== "string" ) { + return this.pushStack( jQuery( selector ).filter( function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( self[ i ], this ) ) { + return true; + } + } + } ) ); + } + + ret = this.pushStack( [] ); + + for ( i = 0; i < len; i++ ) { + jQuery.find( selector, self[ i ], ret ); + } + + return len > 1 ? jQuery.uniqueSort( ret ) : ret; + }, + filter: function( selector ) { + return this.pushStack( winnow( this, selector || [], false ) ); + }, + not: function( selector ) { + return this.pushStack( winnow( this, selector || [], true ) ); + }, + is: function( selector ) { + return !!winnow( + this, + + // If this is a positional/relative selector, check membership in the returned set + // so $("p:first").is("p:last") won't return true for a doc with two "p". + typeof selector === "string" && rneedsContext.test( selector ) ? 
+ jQuery( selector ) : + selector || [], + false + ).length; + } +} ); + + +// Initialize a jQuery object + + +// A central reference to the root jQuery(document) +var rootjQuery, + + // A simple way to check for HTML strings + // Prioritize #id over <tag> to avoid XSS via location.hash (#9521) + // Strict HTML recognition (#11290: must start with <) + // Shortcut simple #id case for speed + rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, + + init = jQuery.fn.init = function( selector, context, root ) { + var match, elem; + + // HANDLE: $(""), $(null), $(undefined), $(false) + if ( !selector ) { + return this; + } + + // Method init() accepts an alternate rootjQuery + // so migrate can support jQuery.sub (gh-2101) + root = root || rootjQuery; + + // Handle HTML strings + if ( typeof selector === "string" ) { + if ( selector[ 0 ] === "<" && + selector[ selector.length - 1 ] === ">" && + selector.length >= 3 ) { + + // Assume that strings that start and end with <> are HTML and skip the regex check + match = [ null, selector, null ]; + + } else { + match = rquickExpr.exec( selector ); + } + + // Match html or make sure no context is specified for #id + if ( match && ( match[ 1 ] || !context ) ) { + + // HANDLE: $(html) -> $(array) + if ( match[ 1 ] ) { + context = context instanceof jQuery ? context[ 0 ] : context; + + // Option to run scripts is true for back-compat + // Intentionally let the error be thrown if parseHTML is not present + jQuery.merge( this, jQuery.parseHTML( + match[ 1 ], + context && context.nodeType ? context.ownerDocument || context : document, + true + ) ); + + // HANDLE: $(html, props) + if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { + for ( match in context ) { + + // Properties of context are called as methods if possible + if ( isFunction( this[ match ] ) ) { + this[ match ]( context[ match ] ); + + // ...and otherwise set as attributes + } else { + this.attr( match, context[ match ] ); + } + } + } + + return this; + + // HANDLE: $(#id) + } else { + elem = document.getElementById( match[ 2 ] ); + + if ( elem ) { + + // Inject the element directly into the jQuery object + this[ 0 ] = elem; + this.length = 1; + } + return this; + } + + // HANDLE: $(expr, $(...)) + } else if ( !context || context.jquery ) { + return ( context || root ).find( selector ); + + // HANDLE: $(expr, context) + // (which is just equivalent to: $(context).find(expr) + } else { + return this.constructor( context ).find( selector ); + } + + // HANDLE: $(DOMElement) + } else if ( selector.nodeType ) { + this[ 0 ] = selector; + this.length = 1; + return this; + + // HANDLE: $(function) + // Shortcut for document ready + } else if ( isFunction( selector ) ) { + return root.ready !== undefined ? 
+ root.ready( selector ) : + + // Execute immediately if ready is not present + selector( jQuery ); + } + + return jQuery.makeArray( selector, this ); + }; + +// Give the init function the jQuery prototype for later instantiation +init.prototype = jQuery.fn; + +// Initialize central reference +rootjQuery = jQuery( document ); + + +var rparentsprev = /^(?:parents|prev(?:Until|All))/, + + // Methods guaranteed to produce a unique set when starting from a unique set + guaranteedUnique = { + children: true, + contents: true, + next: true, + prev: true + }; + +jQuery.fn.extend( { + has: function( target ) { + var targets = jQuery( target, this ), + l = targets.length; + + return this.filter( function() { + var i = 0; + for ( ; i < l; i++ ) { + if ( jQuery.contains( this, targets[ i ] ) ) { + return true; + } + } + } ); + }, + + closest: function( selectors, context ) { + var cur, + i = 0, + l = this.length, + matched = [], + targets = typeof selectors !== "string" && jQuery( selectors ); + + // Positional selectors never match, since there's no _selection_ context + if ( !rneedsContext.test( selectors ) ) { + for ( ; i < l; i++ ) { + for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { + + // Always skip document fragments + if ( cur.nodeType < 11 && ( targets ? + targets.index( cur ) > -1 : + + // Don't pass non-elements to Sizzle + cur.nodeType === 1 && + jQuery.find.matchesSelector( cur, selectors ) ) ) { + + matched.push( cur ); + break; + } + } + } + } + + return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); + }, + + // Determine the position of an element within the set + index: function( elem ) { + + // No argument, return index in parent + if ( !elem ) { + return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1; + } + + // Index in selector + if ( typeof elem === "string" ) { + return indexOf.call( jQuery( elem ), this[ 0 ] ); + } + + // Locate the position of the desired element + return indexOf.call( this, + + // If it receives a jQuery object, the first element is used + elem.jquery ? elem[ 0 ] : elem + ); + }, + + add: function( selector, context ) { + return this.pushStack( + jQuery.uniqueSort( + jQuery.merge( this.get(), jQuery( selector, context ) ) + ) + ); + }, + + addBack: function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter( selector ) + ); + } +} ); + +function sibling( cur, dir ) { + while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} + return cur; +} + +jQuery.each( { + parent: function( elem ) { + var parent = elem.parentNode; + return parent && parent.nodeType !== 11 ? 
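+		// nodeType 11 is a DocumentFragment: .parent() deliberately reports null
+		// for fragment children rather than exposing the fragment itself.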
parent : null;
+	},
+	parents: function( elem ) {
+		return dir( elem, "parentNode" );
+	},
+	parentsUntil: function( elem, _i, until ) {
+		return dir( elem, "parentNode", until );
+	},
+	next: function( elem ) {
+		return sibling( elem, "nextSibling" );
+	},
+	prev: function( elem ) {
+		return sibling( elem, "previousSibling" );
+	},
+	nextAll: function( elem ) {
+		return dir( elem, "nextSibling" );
+	},
+	prevAll: function( elem ) {
+		return dir( elem, "previousSibling" );
+	},
+	nextUntil: function( elem, _i, until ) {
+		return dir( elem, "nextSibling", until );
+	},
+	prevUntil: function( elem, _i, until ) {
+		return dir( elem, "previousSibling", until );
+	},
+	siblings: function( elem ) {
+		return siblings( ( elem.parentNode || {} ).firstChild, elem );
+	},
+	children: function( elem ) {
+		return siblings( elem.firstChild );
+	},
+	contents: function( elem ) {
+		if ( elem.contentDocument != null &&
+
+			// Support: IE 11+
+			// <object> elements with no `data` attribute has an object
+			// `contentDocument` with a `null` prototype.
+			getProto( elem.contentDocument ) ) {
+
+			return elem.contentDocument;
+		}
+
+		// Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only
+		// Treat the template element as a regular one in browsers that
+		// don't support it.
+		if ( nodeName( elem, "template" ) ) {
+			elem = elem.content || elem;
+		}
+
+		return jQuery.merge( [], elem.childNodes );
+	}
+}, function( name, fn ) {
+	jQuery.fn[ name ] = function( until, selector ) {
+		var matched = jQuery.map( this, fn, until );
+
+		if ( name.slice( -5 ) !== "Until" ) {
+			selector = until;
+		}
+
+		if ( selector && typeof selector === "string" ) {
+			matched = jQuery.filter( selector, matched );
+		}
+
+		if ( this.length > 1 ) {
+
+			// Remove duplicates
+			if ( !guaranteedUnique[ name ] ) {
+				jQuery.uniqueSort( matched );
+			}
+
+			// Reverse order for parents* and prev-derivatives
+			if ( rparentsprev.test( name ) ) {
+				matched.reverse();
+			}
+		}
+
+		return this.pushStack( matched );
+	};
+} );
+var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g );
+
+
+
+// Convert String-formatted options into Object-formatted ones
+function createOptions( options ) {
+	var object = {};
+	jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) {
+		object[ flag ] = true;
+	} );
+	return object;
+}
+
+/*
+ * Create a callback list using the following parameters:
+ *
+ *	options: an optional list of space-separated options that will change how
+ *		the callback list behaves or a more traditional option object
+ *
+ * By default a callback list will act like an event callback list and can be
+ * "fired" multiple times.
+ *
+ * Possible options:
+ *
+ *	once:			will ensure the callback list can only be fired once (like a Deferred)
+ *
+ *	memory:			will keep track of previous values and will call any callback added
+ *					after the list has been fired right away with the latest "memorized"
+ *					values (like a Deferred)
+ *
+ *	unique:			will ensure a callback can only be added once (no duplicate in the list)
+ *
+ *	stopOnFalse:	interrupt callings when a callback returns false
+ *
+ */
+jQuery.Callbacks = function( options ) {
+
+	// Convert options from String-formatted to Object-formatted if needed
+	// (we check in cache first)
+	options = typeof options === "string" ?
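+		// For instance, jQuery.Callbacks( "once memory" ) becomes the lookup
+		// object { once: true, memory: true }: such a list fires at most once
+		// and replays the memorized arguments to callbacks added afterwards.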
+ createOptions( options ) : + jQuery.extend( {}, options ); + + var // Flag to know if list is currently firing + firing, + + // Last fire value for non-forgettable lists + memory, + + // Flag to know if list was already fired + fired, + + // Flag to prevent firing + locked, + + // Actual callback list + list = [], + + // Queue of execution data for repeatable lists + queue = [], + + // Index of currently firing callback (modified by add/remove as needed) + firingIndex = -1, + + // Fire callbacks + fire = function() { + + // Enforce single-firing + locked = locked || options.once; + + // Execute callbacks for all pending executions, + // respecting firingIndex overrides and runtime changes + fired = firing = true; + for ( ; queue.length; firingIndex = -1 ) { + memory = queue.shift(); + while ( ++firingIndex < list.length ) { + + // Run callback and check for early termination + if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && + options.stopOnFalse ) { + + // Jump to end and forget the data so .add doesn't re-fire + firingIndex = list.length; + memory = false; + } + } + } + + // Forget the data if we're done with it + if ( !options.memory ) { + memory = false; + } + + firing = false; + + // Clean up if we're done firing for good + if ( locked ) { + + // Keep an empty list if we have data for future add calls + if ( memory ) { + list = []; + + // Otherwise, this object is spent + } else { + list = ""; + } + } + }, + + // Actual Callbacks object + self = { + + // Add a callback or a collection of callbacks to the list + add: function() { + if ( list ) { + + // If we have memory from a past run, we should fire after adding + if ( memory && !firing ) { + firingIndex = list.length - 1; + queue.push( memory ); + } + + ( function add( args ) { + jQuery.each( args, function( _, arg ) { + if ( isFunction( arg ) ) { + if ( !options.unique || !self.has( arg ) ) { + list.push( arg ); + } + } else if ( arg && arg.length && toType( arg ) !== "string" ) { + + // Inspect recursively + add( arg ); + } + } ); + } )( arguments ); + + if ( memory && !firing ) { + fire(); + } + } + return this; + }, + + // Remove a callback from the list + remove: function() { + jQuery.each( arguments, function( _, arg ) { + var index; + while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { + list.splice( index, 1 ); + + // Handle firing indexes + if ( index <= firingIndex ) { + firingIndex--; + } + } + } ); + return this; + }, + + // Check if a given callback is in the list. + // If no argument is given, return whether or not list has callbacks attached. + has: function( fn ) { + return fn ? + jQuery.inArray( fn, list ) > -1 : + list.length > 0; + }, + + // Remove all callbacks from the list + empty: function() { + if ( list ) { + list = []; + } + return this; + }, + + // Disable .fire and .add + // Abort any current/pending executions + // Clear all callbacks and values + disable: function() { + locked = queue = []; + list = memory = ""; + return this; + }, + disabled: function() { + return !list; + }, + + // Disable .fire + // Also disable .add unless we have memory (since it would have no effect) + // Abort any pending executions + lock: function() { + locked = queue = []; + if ( !memory && !firing ) { + list = memory = ""; + } + return this; + }, + locked: function() { + return !!locked; + }, + + // Call all callbacks with the given context and arguments + fireWith: function( context, args ) { + if ( !locked ) { + args = args || []; + args = [ context, args.slice ? 
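+					// Real arrays (which have .slice) are copied defensively so
+					// later mutation by the caller cannot alter a queued firing;
+					// other array-likes are passed through as-is.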
args.slice() : args ]; + queue.push( args ); + if ( !firing ) { + fire(); + } + } + return this; + }, + + // Call all the callbacks with the given arguments + fire: function() { + self.fireWith( this, arguments ); + return this; + }, + + // To know if the callbacks have already been called at least once + fired: function() { + return !!fired; + } + }; + + return self; +}; + + +function Identity( v ) { + return v; +} +function Thrower( ex ) { + throw ex; +} + +function adoptValue( value, resolve, reject, noValue ) { + var method; + + try { + + // Check for promise aspect first to privilege synchronous behavior + if ( value && isFunction( ( method = value.promise ) ) ) { + method.call( value ).done( resolve ).fail( reject ); + + // Other thenables + } else if ( value && isFunction( ( method = value.then ) ) ) { + method.call( value, resolve, reject ); + + // Other non-thenables + } else { + + // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: + // * false: [ value ].slice( 0 ) => resolve( value ) + // * true: [ value ].slice( 1 ) => resolve() + resolve.apply( undefined, [ value ].slice( noValue ) ); + } + + // For Promises/A+, convert exceptions into rejections + // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in + // Deferred#then to conditionally suppress rejection. + } catch ( value ) { + + // Support: Android 4.0 only + // Strict mode functions invoked without .call/.apply get global-object context + reject.apply( undefined, [ value ] ); + } +} + +jQuery.extend( { + + Deferred: function( func ) { + var tuples = [ + + // action, add listener, callbacks, + // ... .then handlers, argument index, [final state] + [ "notify", "progress", jQuery.Callbacks( "memory" ), + jQuery.Callbacks( "memory" ), 2 ], + [ "resolve", "done", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 0, "resolved" ], + [ "reject", "fail", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 1, "rejected" ] + ], + state = "pending", + promise = { + state: function() { + return state; + }, + always: function() { + deferred.done( arguments ).fail( arguments ); + return this; + }, + "catch": function( fn ) { + return promise.then( null, fn ); + }, + + // Keep pipe for back-compat + pipe: function( /* fnDone, fnFail, fnProgress */ ) { + var fns = arguments; + + return jQuery.Deferred( function( newDefer ) { + jQuery.each( tuples, function( _i, tuple ) { + + // Map tuples (progress, done, fail) to arguments (done, fail, progress) + var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; + + // deferred.progress(function() { bind to newDefer or newDefer.notify }) + // deferred.done(function() { bind to newDefer or newDefer.resolve }) + // deferred.fail(function() { bind to newDefer or newDefer.reject }) + deferred[ tuple[ 1 ] ]( function() { + var returned = fn && fn.apply( this, arguments ); + if ( returned && isFunction( returned.promise ) ) { + returned.promise() + .progress( newDefer.notify ) + .done( newDefer.resolve ) + .fail( newDefer.reject ); + } else { + newDefer[ tuple[ 0 ] + "With" ]( + this, + fn ? 
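+								// With a filter function, forward its return value
+								// to the new deferred; without one, relay the
+								// original arguments unchanged.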
[ returned ] : arguments + ); + } + } ); + } ); + fns = null; + } ).promise(); + }, + then: function( onFulfilled, onRejected, onProgress ) { + var maxDepth = 0; + function resolve( depth, deferred, handler, special ) { + return function() { + var that = this, + args = arguments, + mightThrow = function() { + var returned, then; + + // Support: Promises/A+ section 2.3.3.3.3 + // https://promisesaplus.com/#point-59 + // Ignore double-resolution attempts + if ( depth < maxDepth ) { + return; + } + + returned = handler.apply( that, args ); + + // Support: Promises/A+ section 2.3.1 + // https://promisesaplus.com/#point-48 + if ( returned === deferred.promise() ) { + throw new TypeError( "Thenable self-resolution" ); + } + + // Support: Promises/A+ sections 2.3.3.1, 3.5 + // https://promisesaplus.com/#point-54 + // https://promisesaplus.com/#point-75 + // Retrieve `then` only once + then = returned && + + // Support: Promises/A+ section 2.3.4 + // https://promisesaplus.com/#point-64 + // Only check objects and functions for thenability + ( typeof returned === "object" || + typeof returned === "function" ) && + returned.then; + + // Handle a returned thenable + if ( isFunction( then ) ) { + + // Special processors (notify) just wait for resolution + if ( special ) { + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ) + ); + + // Normal processors (resolve) also hook into progress + } else { + + // ...and disregard older resolution values + maxDepth++; + + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ), + resolve( maxDepth, deferred, Identity, + deferred.notifyWith ) + ); + } + + // Handle all other returned values + } else { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Identity ) { + that = undefined; + args = [ returned ]; + } + + // Process the value(s) + // Default process is resolve + ( special || deferred.resolveWith )( that, args ); + } + }, + + // Only normal processors (resolve) catch and reject exceptions + process = special ? + mightThrow : + function() { + try { + mightThrow(); + } catch ( e ) { + + if ( jQuery.Deferred.exceptionHook ) { + jQuery.Deferred.exceptionHook( e, + process.stackTrace ); + } + + // Support: Promises/A+ section 2.3.3.3.4.1 + // https://promisesaplus.com/#point-61 + // Ignore post-resolution exceptions + if ( depth + 1 >= maxDepth ) { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Thrower ) { + that = undefined; + args = [ e ]; + } + + deferred.rejectWith( that, args ); + } + } + }; + + // Support: Promises/A+ section 2.3.3.3.1 + // https://promisesaplus.com/#point-57 + // Re-resolve promises immediately to dodge false rejection from + // subsequent errors + if ( depth ) { + process(); + } else { + + // Call an optional hook to record the stack, in case of exception + // since it's otherwise lost when execution goes async + if ( jQuery.Deferred.getStackHook ) { + process.stackTrace = jQuery.Deferred.getStackHook(); + } + window.setTimeout( process ); + } + }; + } + + return jQuery.Deferred( function( newDefer ) { + + // progress_handlers.add( ... ) + tuples[ 0 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onProgress ) ? + onProgress : + Identity, + newDefer.notifyWith + ) + ); + + // fulfilled_handlers.add( ... 
) + tuples[ 1 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onFulfilled ) ? + onFulfilled : + Identity + ) + ); + + // rejected_handlers.add( ... ) + tuples[ 2 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onRejected ) ? + onRejected : + Thrower + ) + ); + } ).promise(); + }, + + // Get a promise for this deferred + // If obj is provided, the promise aspect is added to the object + promise: function( obj ) { + return obj != null ? jQuery.extend( obj, promise ) : promise; + } + }, + deferred = {}; + + // Add list-specific methods + jQuery.each( tuples, function( i, tuple ) { + var list = tuple[ 2 ], + stateString = tuple[ 5 ]; + + // promise.progress = list.add + // promise.done = list.add + // promise.fail = list.add + promise[ tuple[ 1 ] ] = list.add; + + // Handle state + if ( stateString ) { + list.add( + function() { + + // state = "resolved" (i.e., fulfilled) + // state = "rejected" + state = stateString; + }, + + // rejected_callbacks.disable + // fulfilled_callbacks.disable + tuples[ 3 - i ][ 2 ].disable, + + // rejected_handlers.disable + // fulfilled_handlers.disable + tuples[ 3 - i ][ 3 ].disable, + + // progress_callbacks.lock + tuples[ 0 ][ 2 ].lock, + + // progress_handlers.lock + tuples[ 0 ][ 3 ].lock + ); + } + + // progress_handlers.fire + // fulfilled_handlers.fire + // rejected_handlers.fire + list.add( tuple[ 3 ].fire ); + + // deferred.notify = function() { deferred.notifyWith(...) } + // deferred.resolve = function() { deferred.resolveWith(...) } + // deferred.reject = function() { deferred.rejectWith(...) } + deferred[ tuple[ 0 ] ] = function() { + deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); + return this; + }; + + // deferred.notifyWith = list.fireWith + // deferred.resolveWith = list.fireWith + // deferred.rejectWith = list.fireWith + deferred[ tuple[ 0 ] + "With" ] = list.fireWith; + } ); + + // Make the deferred a promise + promise.promise( deferred ); + + // Call given func if any + if ( func ) { + func.call( deferred, deferred ); + } + + // All done! + return deferred; + }, + + // Deferred helper + when: function( singleValue ) { + var + + // count of uncompleted subordinates + remaining = arguments.length, + + // count of unprocessed arguments + i = remaining, + + // subordinate fulfillment data + resolveContexts = Array( i ), + resolveValues = slice.call( arguments ), + + // the primary Deferred + primary = jQuery.Deferred(), + + // subordinate callback factory + updateFunc = function( i ) { + return function( value ) { + resolveContexts[ i ] = this; + resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; + if ( !( --remaining ) ) { + primary.resolveWith( resolveContexts, resolveValues ); + } + }; + }; + + // Single- and empty arguments are adopted like Promise.resolve + if ( remaining <= 1 ) { + adoptValue( singleValue, primary.done( updateFunc( i ) ).resolve, primary.reject, + !remaining ); + + // Use .then() to unwrap secondary thenables (cf. gh-3000) + if ( primary.state() === "pending" || + isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { + + return primary.then(); + } + } + + // Multiple arguments are aggregated like Promise.all array elements + while ( i-- ) { + adoptValue( resolveValues[ i ], updateFunc( i ), primary.reject ); + } + + return primary.promise(); + } +} ); + + +// These usually indicate a programmer mistake during development, +// warn about them ASAP rather than swallowing them by default. 
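+// For instance, a typo like `undefined.foo` inside a .then() handler becomes a
+// rejection under Promises/A+ semantics; the hook below surfaces such native
+// errors on the console instead of letting them disappear silently.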
+var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; + +jQuery.Deferred.exceptionHook = function( error, stack ) { + + // Support: IE 8 - 9 only + // Console exists when dev tools are open, which can happen at any time + if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { + window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); + } +}; + + + + +jQuery.readyException = function( error ) { + window.setTimeout( function() { + throw error; + } ); +}; + + + + +// The deferred used on DOM ready +var readyList = jQuery.Deferred(); + +jQuery.fn.ready = function( fn ) { + + readyList + .then( fn ) + + // Wrap jQuery.readyException in a function so that the lookup + // happens at the time of error handling instead of callback + // registration. + .catch( function( error ) { + jQuery.readyException( error ); + } ); + + return this; +}; + +jQuery.extend( { + + // Is the DOM ready to be used? Set to true once it occurs. + isReady: false, + + // A counter to track how many items to wait for before + // the ready event fires. See #6781 + readyWait: 1, + + // Handle when the DOM is ready + ready: function( wait ) { + + // Abort if there are pending holds or we're already ready + if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { + return; + } + + // Remember that the DOM is ready + jQuery.isReady = true; + + // If a normal DOM Ready event fired, decrement, and wait if need be + if ( wait !== true && --jQuery.readyWait > 0 ) { + return; + } + + // If there are functions bound, to execute + readyList.resolveWith( document, [ jQuery ] ); + } +} ); + +jQuery.ready.then = readyList.then; + +// The ready event handler and self cleanup method +function completed() { + document.removeEventListener( "DOMContentLoaded", completed ); + window.removeEventListener( "load", completed ); + jQuery.ready(); +} + +// Catch cases where $(document).ready() is called +// after the browser event has already occurred. +// Support: IE <=9 - 10 only +// Older IE sometimes signals "interactive" too soon +if ( document.readyState === "complete" || + ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { + + // Handle it asynchronously to allow scripts the opportunity to delay ready + window.setTimeout( jQuery.ready ); + +} else { + + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", completed ); + + // A fallback to window.onload, that will always work + window.addEventListener( "load", completed ); +} + + + + +// Multifunctional method to get and set values of a collection +// The value/s can optionally be executed if it's a function +var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { + var i = 0, + len = elems.length, + bulk = key == null; + + // Sets many values + if ( toType( key ) === "object" ) { + chainable = true; + for ( i in key ) { + access( elems, fn, i, key[ i ], true, emptyGet, raw ); + } + + // Sets one value + } else if ( value !== undefined ) { + chainable = true; + + if ( !isFunction( value ) ) { + raw = true; + } + + if ( bulk ) { + + // Bulk operations run against the entire set + if ( raw ) { + fn.call( elems, value ); + fn = null; + + // ...except when executing function values + } else { + bulk = fn; + fn = function( elem, _key, value ) { + return bulk.call( jQuery( elem ), value ); + }; + } + } + + if ( fn ) { + for ( ; i < len; i++ ) { + fn( + elems[ i ], key, raw ? 
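+				// Raw values are applied as-is; function values are invoked with
+				// ( index, currentValue ) -- the inner fn( elems[ i ], key ) read --
+				// so setters such as .attr( name, fn ) can derive the new value
+				// from the old one.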
+ value : + value.call( elems[ i ], i, fn( elems[ i ], key ) ) + ); + } + } + } + + if ( chainable ) { + return elems; + } + + // Gets + if ( bulk ) { + return fn.call( elems ); + } + + return len ? fn( elems[ 0 ], key ) : emptyGet; +}; + + +// Matches dashed string for camelizing +var rmsPrefix = /^-ms-/, + rdashAlpha = /-([a-z])/g; + +// Used by camelCase as callback to replace() +function fcamelCase( _all, letter ) { + return letter.toUpperCase(); +} + +// Convert dashed to camelCase; used by the css and data modules +// Support: IE <=9 - 11, Edge 12 - 15 +// Microsoft forgot to hump their vendor prefix (#9572) +function camelCase( string ) { + return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); +} +var acceptData = function( owner ) { + + // Accepts only: + // - Node + // - Node.ELEMENT_NODE + // - Node.DOCUMENT_NODE + // - Object + // - Any + return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); +}; + + + + +function Data() { + this.expando = jQuery.expando + Data.uid++; +} + +Data.uid = 1; + +Data.prototype = { + + cache: function( owner ) { + + // Check if the owner object already has a cache + var value = owner[ this.expando ]; + + // If not, create one + if ( !value ) { + value = {}; + + // We can accept data for non-element nodes in modern browsers, + // but we should not, see #8335. + // Always return an empty object. + if ( acceptData( owner ) ) { + + // If it is a node unlikely to be stringify-ed or looped over + // use plain assignment + if ( owner.nodeType ) { + owner[ this.expando ] = value; + + // Otherwise secure it in a non-enumerable property + // configurable must be true to allow the property to be + // deleted when data is removed + } else { + Object.defineProperty( owner, this.expando, { + value: value, + configurable: true + } ); + } + } + } + + return value; + }, + set: function( owner, data, value ) { + var prop, + cache = this.cache( owner ); + + // Handle: [ owner, key, value ] args + // Always use camelCase key (gh-2257) + if ( typeof data === "string" ) { + cache[ camelCase( data ) ] = value; + + // Handle: [ owner, { properties } ] args + } else { + + // Copy the properties one-by-one to the cache object + for ( prop in data ) { + cache[ camelCase( prop ) ] = data[ prop ]; + } + } + return cache; + }, + get: function( owner, key ) { + return key === undefined ? + this.cache( owner ) : + + // Always use camelCase key (gh-2257) + owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ]; + }, + access: function( owner, key, value ) { + + // In cases where either: + // + // 1. No key was specified + // 2. A string key was specified, but no value provided + // + // Take the "read" path and allow the get method to determine + // which value to return, respectively either: + // + // 1. The entire cache object + // 2. The data stored at the key + // + if ( key === undefined || + ( ( key && typeof key === "string" ) && value === undefined ) ) { + + return this.get( owner, key ); + } + + // When the key is not a string, or both a key and value + // are specified, set or extend (existing objects) with either: + // + // 1. An object of properties + // 2. A key and value + // + this.set( owner, key, value ); + + // Since the "set" path can have two possible entry points + // return the expected data based on which path was taken[*] + return value !== undefined ? 
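+		// A ( key, value ) write echoes the value back; an object-form write
+		// reaches this point with value undefined, so the object (key) is
+		// returned instead.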
value : key; + }, + remove: function( owner, key ) { + var i, + cache = owner[ this.expando ]; + + if ( cache === undefined ) { + return; + } + + if ( key !== undefined ) { + + // Support array or space separated string of keys + if ( Array.isArray( key ) ) { + + // If key is an array of keys... + // We always set camelCase keys, so remove that. + key = key.map( camelCase ); + } else { + key = camelCase( key ); + + // If a key with the spaces exists, use it. + // Otherwise, create an array by matching non-whitespace + key = key in cache ? + [ key ] : + ( key.match( rnothtmlwhite ) || [] ); + } + + i = key.length; + + while ( i-- ) { + delete cache[ key[ i ] ]; + } + } + + // Remove the expando if there's no more data + if ( key === undefined || jQuery.isEmptyObject( cache ) ) { + + // Support: Chrome <=35 - 45 + // Webkit & Blink performance suffers when deleting properties + // from DOM nodes, so set to undefined instead + // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) + if ( owner.nodeType ) { + owner[ this.expando ] = undefined; + } else { + delete owner[ this.expando ]; + } + } + }, + hasData: function( owner ) { + var cache = owner[ this.expando ]; + return cache !== undefined && !jQuery.isEmptyObject( cache ); + } +}; +var dataPriv = new Data(); + +var dataUser = new Data(); + + + +// Implementation Summary +// +// 1. Enforce API surface and semantic compatibility with 1.9.x branch +// 2. Improve the module's maintainability by reducing the storage +// paths to a single mechanism. +// 3. Use the same single mechanism to support "private" and "user" data. +// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) +// 5. Avoid exposing implementation details on user objects (eg. expando properties) +// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 + +var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, + rmultiDash = /[A-Z]/g; + +function getData( data ) { + if ( data === "true" ) { + return true; + } + + if ( data === "false" ) { + return false; + } + + if ( data === "null" ) { + return null; + } + + // Only convert to a number if it doesn't change the string + if ( data === +data + "" ) { + return +data; + } + + if ( rbrace.test( data ) ) { + return JSON.parse( data ); + } + + return data; +} + +function dataAttr( elem, key, data ) { + var name; + + // If nothing was found internally, try to fetch any + // data from the HTML5 data-* attribute + if ( data === undefined && elem.nodeType === 1 ) { + name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); + data = elem.getAttribute( name ); + + if ( typeof data === "string" ) { + try { + data = getData( data ); + } catch ( e ) {} + + // Make sure we set the data so it isn't changed later + dataUser.set( elem, key, data ); + } else { + data = undefined; + } + } + return data; +} + +jQuery.extend( { + hasData: function( elem ) { + return dataUser.hasData( elem ) || dataPriv.hasData( elem ); + }, + + data: function( elem, name, data ) { + return dataUser.access( elem, name, data ); + }, + + removeData: function( elem, name ) { + dataUser.remove( elem, name ); + }, + + // TODO: Now that all calls to _data and _removeData have been replaced + // with direct calls to dataPriv methods, these can be deprecated. 
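+	// dataPriv backs these "private" entries (queues, event handlers, etc.);
+	// dataUser holds only what the public .data() API exposes.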
+ _data: function( elem, name, data ) { + return dataPriv.access( elem, name, data ); + }, + + _removeData: function( elem, name ) { + dataPriv.remove( elem, name ); + } +} ); + +jQuery.fn.extend( { + data: function( key, value ) { + var i, name, data, + elem = this[ 0 ], + attrs = elem && elem.attributes; + + // Gets all values + if ( key === undefined ) { + if ( this.length ) { + data = dataUser.get( elem ); + + if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { + i = attrs.length; + while ( i-- ) { + + // Support: IE 11 only + // The attrs elements can be null (#14894) + if ( attrs[ i ] ) { + name = attrs[ i ].name; + if ( name.indexOf( "data-" ) === 0 ) { + name = camelCase( name.slice( 5 ) ); + dataAttr( elem, name, data[ name ] ); + } + } + } + dataPriv.set( elem, "hasDataAttrs", true ); + } + } + + return data; + } + + // Sets multiple values + if ( typeof key === "object" ) { + return this.each( function() { + dataUser.set( this, key ); + } ); + } + + return access( this, function( value ) { + var data; + + // The calling jQuery object (element matches) is not empty + // (and therefore has an element appears at this[ 0 ]) and the + // `value` parameter was not undefined. An empty jQuery object + // will result in `undefined` for elem = this[ 0 ] which will + // throw an exception if an attempt to read a data cache is made. + if ( elem && value === undefined ) { + + // Attempt to get data from the cache + // The key will always be camelCased in Data + data = dataUser.get( elem, key ); + if ( data !== undefined ) { + return data; + } + + // Attempt to "discover" the data in + // HTML5 custom data-* attrs + data = dataAttr( elem, key ); + if ( data !== undefined ) { + return data; + } + + // We tried really hard, but the data doesn't exist. + return; + } + + // Set the data... 
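+			// (runs once per element in the set; access() then returns the
+			// jQuery collection itself so the call chains)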
+ this.each( function() { + + // We always store the camelCased key + dataUser.set( this, key, value ); + } ); + }, null, value, arguments.length > 1, null, true ); + }, + + removeData: function( key ) { + return this.each( function() { + dataUser.remove( this, key ); + } ); + } +} ); + + +jQuery.extend( { + queue: function( elem, type, data ) { + var queue; + + if ( elem ) { + type = ( type || "fx" ) + "queue"; + queue = dataPriv.get( elem, type ); + + // Speed up dequeue by getting out quickly if this is just a lookup + if ( data ) { + if ( !queue || Array.isArray( data ) ) { + queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); + } else { + queue.push( data ); + } + } + return queue || []; + } + }, + + dequeue: function( elem, type ) { + type = type || "fx"; + + var queue = jQuery.queue( elem, type ), + startLength = queue.length, + fn = queue.shift(), + hooks = jQuery._queueHooks( elem, type ), + next = function() { + jQuery.dequeue( elem, type ); + }; + + // If the fx queue is dequeued, always remove the progress sentinel + if ( fn === "inprogress" ) { + fn = queue.shift(); + startLength--; + } + + if ( fn ) { + + // Add a progress sentinel to prevent the fx queue from being + // automatically dequeued + if ( type === "fx" ) { + queue.unshift( "inprogress" ); + } + + // Clear up the last queue stop function + delete hooks.stop; + fn.call( elem, next, hooks ); + } + + if ( !startLength && hooks ) { + hooks.empty.fire(); + } + }, + + // Not public - generate a queueHooks object, or return the current one + _queueHooks: function( elem, type ) { + var key = type + "queueHooks"; + return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { + empty: jQuery.Callbacks( "once memory" ).add( function() { + dataPriv.remove( elem, [ type + "queue", key ] ); + } ) + } ); + } +} ); + +jQuery.fn.extend( { + queue: function( type, data ) { + var setter = 2; + + if ( typeof type !== "string" ) { + data = type; + type = "fx"; + setter--; + } + + if ( arguments.length < setter ) { + return jQuery.queue( this[ 0 ], type ); + } + + return data === undefined ? 
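+		// With nothing to add, return the set unchanged; otherwise append the
+		// entry and kick the "fx" queue off unless an "inprogress" sentinel
+		// shows an animation is already running.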
+ this : + this.each( function() { + var queue = jQuery.queue( this, type, data ); + + // Ensure a hooks for this queue + jQuery._queueHooks( this, type ); + + if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { + jQuery.dequeue( this, type ); + } + } ); + }, + dequeue: function( type ) { + return this.each( function() { + jQuery.dequeue( this, type ); + } ); + }, + clearQueue: function( type ) { + return this.queue( type || "fx", [] ); + }, + + // Get a promise resolved when queues of a certain type + // are emptied (fx is the type by default) + promise: function( type, obj ) { + var tmp, + count = 1, + defer = jQuery.Deferred(), + elements = this, + i = this.length, + resolve = function() { + if ( !( --count ) ) { + defer.resolveWith( elements, [ elements ] ); + } + }; + + if ( typeof type !== "string" ) { + obj = type; + type = undefined; + } + type = type || "fx"; + + while ( i-- ) { + tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); + if ( tmp && tmp.empty ) { + count++; + tmp.empty.add( resolve ); + } + } + resolve(); + return defer.promise( obj ); + } +} ); +var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; + +var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); + + +var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; + +var documentElement = document.documentElement; + + + + var isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ); + }, + composed = { composed: true }; + + // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only + // Check attachment across shadow DOM boundaries when possible (gh-3504) + // Support: iOS 10.0-10.2 only + // Early iOS 10 versions support `attachShadow` but not `getRootNode`, + // leading to errors. We need to check for `getRootNode`. + if ( documentElement.getRootNode ) { + isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ) || + elem.getRootNode( composed ) === elem.ownerDocument; + }; + } +var isHiddenWithinTree = function( elem, el ) { + + // isHiddenWithinTree might be called from jQuery#filter function; + // in that case, element will be second argument + elem = el || elem; + + // Inline style trumps all + return elem.style.display === "none" || + elem.style.display === "" && + + // Otherwise, check computed style + // Support: Firefox <=43 - 45 + // Disconnected elements can have computed display: none, so first confirm that elem is + // in the document. + isAttached( elem ) && + + jQuery.css( elem, "display" ) === "none"; + }; + + + +function adjustCSS( elem, prop, valueParts, tween ) { + var adjusted, scale, + maxIterations = 20, + currentValue = tween ? + function() { + return tween.cur(); + } : + function() { + return jQuery.css( elem, prop, "" ); + }, + initial = currentValue(), + unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? 
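+			// Properties listed in jQuery.cssNumber (opacity, zIndex, lineHeight,
+			// ...) are unitless; every other property defaults to pixels.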
"" : "px" ), + + // Starting value computation is required for potential unit mismatches + initialInUnit = elem.nodeType && + ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && + rcssNum.exec( jQuery.css( elem, prop ) ); + + if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { + + // Support: Firefox <=54 + // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144) + initial = initial / 2; + + // Trust units reported by jQuery.css + unit = unit || initialInUnit[ 3 ]; + + // Iteratively approximate from a nonzero starting point + initialInUnit = +initial || 1; + + while ( maxIterations-- ) { + + // Evaluate and update our best guess (doubling guesses that zero out). + // Finish if the scale equals or crosses 1 (making the old*new product non-positive). + jQuery.style( elem, prop, initialInUnit + unit ); + if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) { + maxIterations = 0; + } + initialInUnit = initialInUnit / scale; + + } + + initialInUnit = initialInUnit * 2; + jQuery.style( elem, prop, initialInUnit + unit ); + + // Make sure we update the tween properties later on + valueParts = valueParts || []; + } + + if ( valueParts ) { + initialInUnit = +initialInUnit || +initial || 0; + + // Apply relative offset (+=/-=) if specified + adjusted = valueParts[ 1 ] ? + initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : + +valueParts[ 2 ]; + if ( tween ) { + tween.unit = unit; + tween.start = initialInUnit; + tween.end = adjusted; + } + } + return adjusted; +} + + +var defaultDisplayMap = {}; + +function getDefaultDisplay( elem ) { + var temp, + doc = elem.ownerDocument, + nodeName = elem.nodeName, + display = defaultDisplayMap[ nodeName ]; + + if ( display ) { + return display; + } + + temp = doc.body.appendChild( doc.createElement( nodeName ) ); + display = jQuery.css( temp, "display" ); + + temp.parentNode.removeChild( temp ); + + if ( display === "none" ) { + display = "block"; + } + defaultDisplayMap[ nodeName ] = display; + + return display; +} + +function showHide( elements, show ) { + var display, elem, + values = [], + index = 0, + length = elements.length; + + // Determine new display value for elements that need to change + for ( ; index < length; index++ ) { + elem = elements[ index ]; + if ( !elem.style ) { + continue; + } + + display = elem.style.display; + if ( show ) { + + // Since we force visibility upon cascade-hidden elements, an immediate (and slow) + // check is required in this first loop unless we have a nonempty display value (either + // inline or about-to-be-restored) + if ( display === "none" ) { + values[ index ] = dataPriv.get( elem, "display" ) || null; + if ( !values[ index ] ) { + elem.style.display = ""; + } + } + if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { + values[ index ] = getDefaultDisplay( elem ); + } + } else { + if ( display !== "none" ) { + values[ index ] = "none"; + + // Remember what we're overwriting + dataPriv.set( elem, "display", display ); + } + } + } + + // Set the display of the elements in a second loop to avoid constant reflow + for ( index = 0; index < length; index++ ) { + if ( values[ index ] != null ) { + elements[ index ].style.display = values[ index ]; + } + } + + return elements; +} + +jQuery.fn.extend( { + show: function() { + return showHide( this, true ); + }, + hide: function() { + return showHide( this ); + }, + toggle: function( state ) { + if ( typeof state === "boolean" ) { + return state ? 
this.show() : this.hide();
+		}
+
+		return this.each( function() {
+			if ( isHiddenWithinTree( this ) ) {
+				jQuery( this ).show();
+			} else {
+				jQuery( this ).hide();
+			}
+		} );
+	}
+} );
+var rcheckableType = ( /^(?:checkbox|radio)$/i );
+
+var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i );
+
+var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i );
+
+
+
+( function() {
+	var fragment = document.createDocumentFragment(),
+		div = fragment.appendChild( document.createElement( "div" ) ),
+		input = document.createElement( "input" );
+
+	// Support: Android 4.0 - 4.3 only
+	// Check state lost if the name is set (#11217)
+	// Support: Windows Web Apps (WWA)
+	// `name` and `type` must use .setAttribute for WWA (#14901)
+	input.setAttribute( "type", "radio" );
+	input.setAttribute( "checked", "checked" );
+	input.setAttribute( "name", "t" );
+
+	div.appendChild( input );
+
+	// Support: Android <=4.1 only
+	// Older WebKit doesn't clone checked state correctly in fragments
+	support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked;
+
+	// Support: IE <=11 only
+	// Make sure textarea (and checkbox) defaultValue is properly cloned
+	div.innerHTML = "<textarea>x</textarea>";
+	support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;
+
+	// Support: IE <=9 only
+	// IE <=9 replaces <option> tags with their contents when inserted outside of
+	// the select element.
+	div.innerHTML = "<option></option>";
+	support.option = !!div.lastChild;
+} )();
+
+
+// We have to close these tags to support XHTML (#13200)
+var wrapMap = {
+
+	// XHTML parsers do not magically insert elements in the
+	// same way that tag soup parsers do. So we cannot shorten
+	// this by omitting <tbody> or other required elements.
+	thead: [ 1, "<table>", "</table>" ],
+	col: [ 2, "<table><colgroup>", "</colgroup></table>" ],
+	tr: [ 2, "<table><tbody>", "</tbody></table>" ],
+	td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
+
+	_default: [ 0, "", "" ]
+};
+
+wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
+wrapMap.th = wrapMap.td;
+
+// Support: IE <=9 only
+if ( !support.option ) {
+	wrapMap.optgroup = wrapMap.option = [ 1, "<select multiple='multiple'>", "</select>" ];
+}
+
+
+function getAll( context, tag ) {
+
+	// Support: IE <=9 - 11 only
+	// Use typeof to avoid zero-argument method invocation on host objects (#15151)
+	var ret;
+
+	if ( typeof context.getElementsByTagName !== "undefined" ) {
+		ret = context.getElementsByTagName( tag || "*" );
+
+	} else if ( typeof context.querySelectorAll !== "undefined" ) {
+		ret = context.querySelectorAll( tag || "*" );
+
+	} else {
+		ret = [];
+	}
+
+	if ( tag === undefined || tag && nodeName( context, tag ) ) {
+		return jQuery.merge( [ context ], ret );
+	}
+
+	return ret;
+}
+
+
+// Mark scripts as having already been evaluated
+function setGlobalEval( elems, refElements ) {
+	var i = 0,
+		l = elems.length;
+
+	for ( ; i < l; i++ ) {
+		dataPriv.set(
+			elems[ i ],
+			"globalEval",
+			!refElements || dataPriv.get( refElements[ i ], "globalEval" )
+		);
+	}
+}
+
+
+var rhtml = /<|&#?\w+;/;
+
+function buildFragment( elems, context, scripts, selection, ignored ) {
+	var elem, tmp, tag, wrap, attached, j,
+		fragment = context.createDocumentFragment(),
+		nodes = [],
+		i = 0,
+		l = elems.length;
+
+	for ( ; i < l; i++ ) {
+		elem = elems[ i ];
+
+		if ( elem || elem === 0 ) {
+
+			// Add nodes directly
+			if ( toType( elem ) === "object" ) {
+
+				// Support: Android <=4.0 only, PhantomJS 1 only
+				// push.apply(_, arraylike) throws on ancient WebKit
+				jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem );
+
+			// Convert non-html into a text node
+			} else if ( !rhtml.test( elem ) ) {
+				nodes.push( context.createTextNode( elem ) );
+
+			// Convert html into DOM nodes
+			} else {
+				tmp = tmp || fragment.appendChild( context.createElement( "div" ) );
+
+				// Deserialize a standard representation
+				tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase();
+				wrap = wrapMap[ tag ] || wrapMap._default;
+				tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ];
+
+				// Descend through wrappers to the right content
+				j = wrap[ 0 ];
+				while ( j-- ) {
+					tmp = tmp.lastChild;
+				}
+
+				// Support: Android <=4.0 only, PhantomJS 1 only
+				// push.apply(_, arraylike) throws on ancient WebKit
+				jQuery.merge( nodes, tmp.childNodes );
+
+				// Remember the top-level container
+				tmp = fragment.firstChild;
+
+				// Ensure the created nodes are orphaned (#12392)
+				tmp.textContent = "";
+			}
+		}
+	}
+
+	// Remove wrapper from fragment
+	fragment.textContent = "";
+
+	i = 0;
+	while ( ( elem = nodes[ i++ ] ) ) {
+
+		// Skip elements already in the context collection (trac-4087)
+		if ( selection && jQuery.inArray( elem, selection ) > -1 ) {
+			if ( ignored ) {
+				ignored.push( elem );
+			}
+			continue;
+		}
+
+		attached = isAttached( elem );
+
+		// Append to fragment
+		tmp = getAll( fragment.appendChild( elem ), "script" );
+
+		// Preserve script evaluation history
+		if ( attached ) {
+			setGlobalEval( tmp );
+		}
+
+		// Capture executables
+		if ( scripts ) {
+			j = 0;
+			while ( ( elem = tmp[ j++ ] ) ) {
+				if ( rscriptType.test( elem.type || "" ) ) {
+					scripts.push( elem );
+				}
+			}
+		}
+	}
+
+	return fragment;
+}
+
+
+var rtypenamespace = /^([^.]*)(?:\.(.+)|)/;
+
+function returnTrue() {
+	return true;
+}
+
+function returnFalse() {
+	return false;
+}
+
+// Support: IE <=9 - 11+
+// focus() and blur() are asynchronous, except when they are no-op.
+// So expect focus to be synchronous when the element is already active, +// and blur to be synchronous when the element is not already active. +// (focus and blur are always synchronous in other supported browsers, +// this just defines when we can count on it). +function expectSync( elem, type ) { + return ( elem === safeActiveElement() ) === ( type === "focus" ); +} + +// Support: IE <=9 only +// Accessing document.activeElement can throw unexpectedly +// https://bugs.jquery.com/ticket/13393 +function safeActiveElement() { + try { + return document.activeElement; + } catch ( err ) { } +} + +function on( elem, types, selector, data, fn, one ) { + var origFn, type; + + // Types can be a map of types/handlers + if ( typeof types === "object" ) { + + // ( types-Object, selector, data ) + if ( typeof selector !== "string" ) { + + // ( types-Object, data ) + data = data || selector; + selector = undefined; + } + for ( type in types ) { + on( elem, type, selector, data, types[ type ], one ); + } + return elem; + } + + if ( data == null && fn == null ) { + + // ( types, fn ) + fn = selector; + data = selector = undefined; + } else if ( fn == null ) { + if ( typeof selector === "string" ) { + + // ( types, selector, fn ) + fn = data; + data = undefined; + } else { + + // ( types, data, fn ) + fn = data; + data = selector; + selector = undefined; + } + } + if ( fn === false ) { + fn = returnFalse; + } else if ( !fn ) { + return elem; + } + + if ( one === 1 ) { + origFn = fn; + fn = function( event ) { + + // Can use an empty set, since event contains the info + jQuery().off( event ); + return origFn.apply( this, arguments ); + }; + + // Use same guid so caller can remove using origFn + fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); + } + return elem.each( function() { + jQuery.event.add( this, types, fn, data, selector ); + } ); +} + +/* + * Helper functions for managing events -- not part of the public interface. + * Props to Dean Edwards' addEvent library for many of the ideas. + */ +jQuery.event = { + + global: {}, + + add: function( elem, types, handler, data, selector ) { + + var handleObjIn, eventHandle, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.get( elem ); + + // Only attach events to objects that accept data + if ( !acceptData( elem ) ) { + return; + } + + // Caller can pass in an object of custom data in lieu of the handler + if ( handler.handler ) { + handleObjIn = handler; + handler = handleObjIn.handler; + selector = handleObjIn.selector; + } + + // Ensure that invalid selectors throw exceptions at attach time + // Evaluate against documentElement in case elem is a non-element node (e.g., document) + if ( selector ) { + jQuery.find.matchesSelector( documentElement, selector ); + } + + // Make sure that the handler has a unique ID, used to find/remove it later + if ( !handler.guid ) { + handler.guid = jQuery.guid++; + } + + // Init the element's event structure and main handler, if this is the first + if ( !( events = elemData.events ) ) { + events = elemData.events = Object.create( null ); + } + if ( !( eventHandle = elemData.handle ) ) { + eventHandle = elemData.handle = function( e ) { + + // Discard the second event of a jQuery.event.trigger() and + // when an event is called after a page has unloaded + return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
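+				// jQuery.event.trigger() sets jQuery.event.triggered to the type
+				// while it calls the native method, so the native event produced
+				// there is ignored here rather than dispatched a second time.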
+ jQuery.event.dispatch.apply( elem, arguments ) : undefined; + }; + } + + // Handle multiple events separated by a space + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // There *must* be a type, no attaching namespace-only handlers + if ( !type ) { + continue; + } + + // If event changes its type, use the special event handlers for the changed type + special = jQuery.event.special[ type ] || {}; + + // If selector defined, determine special event api type, otherwise given type + type = ( selector ? special.delegateType : special.bindType ) || type; + + // Update special based on newly reset type + special = jQuery.event.special[ type ] || {}; + + // handleObj is passed to all event handlers + handleObj = jQuery.extend( { + type: type, + origType: origType, + data: data, + handler: handler, + guid: handler.guid, + selector: selector, + needsContext: selector && jQuery.expr.match.needsContext.test( selector ), + namespace: namespaces.join( "." ) + }, handleObjIn ); + + // Init the event handler queue if we're the first + if ( !( handlers = events[ type ] ) ) { + handlers = events[ type ] = []; + handlers.delegateCount = 0; + + // Only use addEventListener if the special events handler returns false + if ( !special.setup || + special.setup.call( elem, data, namespaces, eventHandle ) === false ) { + + if ( elem.addEventListener ) { + elem.addEventListener( type, eventHandle ); + } + } + } + + if ( special.add ) { + special.add.call( elem, handleObj ); + + if ( !handleObj.handler.guid ) { + handleObj.handler.guid = handler.guid; + } + } + + // Add to the element's handler list, delegates in front + if ( selector ) { + handlers.splice( handlers.delegateCount++, 0, handleObj ); + } else { + handlers.push( handleObj ); + } + + // Keep track of which events have ever been used, for event optimization + jQuery.event.global[ type ] = true; + } + + }, + + // Detach an event or set of events from an element + remove: function( elem, types, handler, selector, mappedTypes ) { + + var j, origCount, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); + + if ( !elemData || !( events = elemData.events ) ) { + return; + } + + // Once for each type.namespace in types; type may be omitted + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // Unbind all events (on this namespace, if provided) for the element + if ( !type ) { + for ( type in events ) { + jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); + } + continue; + } + + special = jQuery.event.special[ type ] || {}; + type = ( selector ? 
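+			// Mirror the mapping done in add(): special events may store their
+			// handlers under a surrogate type (delegated "mouseenter" lives
+			// under "mouseover", for example), so translate before the lookup.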
special.delegateType : special.bindType ) || type; + handlers = events[ type ] || []; + tmp = tmp[ 2 ] && + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); + + // Remove matching events + origCount = j = handlers.length; + while ( j-- ) { + handleObj = handlers[ j ]; + + if ( ( mappedTypes || origType === handleObj.origType ) && + ( !handler || handler.guid === handleObj.guid ) && + ( !tmp || tmp.test( handleObj.namespace ) ) && + ( !selector || selector === handleObj.selector || + selector === "**" && handleObj.selector ) ) { + handlers.splice( j, 1 ); + + if ( handleObj.selector ) { + handlers.delegateCount--; + } + if ( special.remove ) { + special.remove.call( elem, handleObj ); + } + } + } + + // Remove generic event handler if we removed something and no more handlers exist + // (avoids potential for endless recursion during removal of special event handlers) + if ( origCount && !handlers.length ) { + if ( !special.teardown || + special.teardown.call( elem, namespaces, elemData.handle ) === false ) { + + jQuery.removeEvent( elem, type, elemData.handle ); + } + + delete events[ type ]; + } + } + + // Remove data and the expando if it's no longer used + if ( jQuery.isEmptyObject( events ) ) { + dataPriv.remove( elem, "handle events" ); + } + }, + + dispatch: function( nativeEvent ) { + + var i, j, ret, matched, handleObj, handlerQueue, + args = new Array( arguments.length ), + + // Make a writable jQuery.Event from the native event object + event = jQuery.event.fix( nativeEvent ), + + handlers = ( + dataPriv.get( this, "events" ) || Object.create( null ) + )[ event.type ] || [], + special = jQuery.event.special[ event.type ] || {}; + + // Use the fix-ed jQuery.Event rather than the (read-only) native event + args[ 0 ] = event; + + for ( i = 1; i < arguments.length; i++ ) { + args[ i ] = arguments[ i ]; + } + + event.delegateTarget = this; + + // Call the preDispatch hook for the mapped type, and let it bail if desired + if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { + return; + } + + // Determine handlers + handlerQueue = jQuery.event.handlers.call( this, event, handlers ); + + // Run delegates first; they may want to stop propagation beneath us + i = 0; + while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { + event.currentTarget = matched.elem; + + j = 0; + while ( ( handleObj = matched.handlers[ j++ ] ) && + !event.isImmediatePropagationStopped() ) { + + // If the event is namespaced, then each handler is only invoked if it is + // specially universal or its namespaces are a superset of the event's. 
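+					// For example, an event triggered as "click.a" runs handlers
+					// bound as "click.a" or "click.a.b", but not plain "click"
+					// handlers nor those bound only as "click.b".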
+ if ( !event.rnamespace || handleObj.namespace === false || + event.rnamespace.test( handleObj.namespace ) ) { + + event.handleObj = handleObj; + event.data = handleObj.data; + + ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || + handleObj.handler ).apply( matched.elem, args ); + + if ( ret !== undefined ) { + if ( ( event.result = ret ) === false ) { + event.preventDefault(); + event.stopPropagation(); + } + } + } + } + } + + // Call the postDispatch hook for the mapped type + if ( special.postDispatch ) { + special.postDispatch.call( this, event ); + } + + return event.result; + }, + + handlers: function( event, handlers ) { + var i, handleObj, sel, matchedHandlers, matchedSelectors, + handlerQueue = [], + delegateCount = handlers.delegateCount, + cur = event.target; + + // Find delegate handlers + if ( delegateCount && + + // Support: IE <=9 + // Black-hole SVG instance trees (trac-13180) + cur.nodeType && + + // Support: Firefox <=42 + // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) + // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click + // Support: IE 11 only + // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) + !( event.type === "click" && event.button >= 1 ) ) { + + for ( ; cur !== this; cur = cur.parentNode || this ) { + + // Don't check non-elements (#13208) + // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) + if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { + matchedHandlers = []; + matchedSelectors = {}; + for ( i = 0; i < delegateCount; i++ ) { + handleObj = handlers[ i ]; + + // Don't conflict with Object.prototype properties (#13203) + sel = handleObj.selector + " "; + + if ( matchedSelectors[ sel ] === undefined ) { + matchedSelectors[ sel ] = handleObj.needsContext ? + jQuery( sel, this ).index( cur ) > -1 : + jQuery.find( sel, this, null, [ cur ] ).length; + } + if ( matchedSelectors[ sel ] ) { + matchedHandlers.push( handleObj ); + } + } + if ( matchedHandlers.length ) { + handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); + } + } + } + } + + // Add the remaining (directly-bound) handlers + cur = this; + if ( delegateCount < handlers.length ) { + handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); + } + + return handlerQueue; + }, + + addProp: function( name, hook ) { + Object.defineProperty( jQuery.Event.prototype, name, { + enumerable: true, + configurable: true, + + get: isFunction( hook ) ? + function() { + if ( this.originalEvent ) { + return hook( this.originalEvent ); + } + } : + function() { + if ( this.originalEvent ) { + return this.originalEvent[ name ]; + } + }, + + set: function( value ) { + Object.defineProperty( this, name, { + enumerable: true, + configurable: true, + writable: true, + value: value + } ); + } + } ); + }, + + fix: function( originalEvent ) { + return originalEvent[ jQuery.expando ] ? + originalEvent : + new jQuery.Event( originalEvent ); + }, + + special: { + load: { + + // Prevent triggered image.load events from bubbling to window.load + noBubble: true + }, + click: { + + // Utilize native event to ensure correct state for checkable inputs + setup: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. 
+ var el = this || data; + + // Claim the first handler + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + // dataPriv.set( el, "click", ... ) + leverageNative( el, "click", returnTrue ); + } + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. + var el = this || data; + + // Force setup before triggering a click + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + leverageNative( el, "click" ); + } + + // Return non-false to allow normal event-path propagation + return true; + }, + + // For cross-browser consistency, suppress native .click() on links + // Also prevent it if we're currently inside a leveraged native-event stack + _default: function( event ) { + var target = event.target; + return rcheckableType.test( target.type ) && + target.click && nodeName( target, "input" ) && + dataPriv.get( target, "click" ) || + nodeName( target, "a" ); + } + }, + + beforeunload: { + postDispatch: function( event ) { + + // Support: Firefox 20+ + // Firefox doesn't alert if the returnValue field is not set. + if ( event.result !== undefined && event.originalEvent ) { + event.originalEvent.returnValue = event.result; + } + } + } + } +}; + +// Ensure the presence of an event listener that handles manually-triggered +// synthetic events by interrupting progress until reinvoked in response to +// *native* events that it fires directly, ensuring that state changes have +// already occurred before other listeners are invoked. +function leverageNative( el, type, expectSync ) { + + // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add + if ( !expectSync ) { + if ( dataPriv.get( el, type ) === undefined ) { + jQuery.event.add( el, type, returnTrue ); + } + return; + } + + // Register the controller as a special universal handler for all event namespaces + dataPriv.set( el, type, false ); + jQuery.event.add( el, type, { + namespace: false, + handler: function( event ) { + var notAsync, result, + saved = dataPriv.get( this, type ); + + if ( ( event.isTrigger & 1 ) && this[ type ] ) { + + // Interrupt processing of the outer synthetic .trigger()ed event + // Saved data should be false in such cases, but might be a leftover capture object + // from an async native handler (gh-4350) + if ( !saved.length ) { + + // Store arguments for use when handling the inner native event + // There will always be at least one argument (an event object), so this array + // will not be confused with a leftover capture object. + saved = slice.call( arguments ); + dataPriv.set( this, type, saved ); + + // Trigger the native event and capture its result + // Support: IE <=9 - 11+ + // focus() and blur() are asynchronous + notAsync = expectSync( this, type ); + this[ type ](); + result = dataPriv.get( this, type ); + if ( saved !== result || notAsync ) { + dataPriv.set( this, type, false ); + } else { + result = {}; + } + if ( saved !== result ) { + + // Cancel the outer synthetic event + event.stopImmediatePropagation(); + event.preventDefault(); + + // Support: Chrome 86+ + // In Chrome, if an element having a focusout handler is blurred by + // clicking outside of it, it invokes the handler synchronously. 
If + // that handler calls `.remove()` on the element, the data is cleared, + // leaving `result` undefined. We need to guard against this. + return result && result.value; + } + + // If this is an inner synthetic event for an event with a bubbling surrogate + // (focus or blur), assume that the surrogate already propagated from triggering the + // native event and prevent that from happening again here. + // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the + // bubbling surrogate propagates *after* the non-bubbling base), but that seems + // less bad than duplication. + } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { + event.stopPropagation(); + } + + // If this is a native event triggered above, everything is now in order + // Fire an inner synthetic event with the original arguments + } else if ( saved.length ) { + + // ...and capture the result + dataPriv.set( this, type, { + value: jQuery.event.trigger( + + // Support: IE <=9 - 11+ + // Extend with the prototype to reset the above stopImmediatePropagation() + jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), + saved.slice( 1 ), + this + ) + } ); + + // Abort handling of the native event + event.stopImmediatePropagation(); + } + } + } ); +} + +jQuery.removeEvent = function( elem, type, handle ) { + + // This "if" is needed for plain objects + if ( elem.removeEventListener ) { + elem.removeEventListener( type, handle ); + } +}; + +jQuery.Event = function( src, props ) { + + // Allow instantiation without the 'new' keyword + if ( !( this instanceof jQuery.Event ) ) { + return new jQuery.Event( src, props ); + } + + // Event object + if ( src && src.type ) { + this.originalEvent = src; + this.type = src.type; + + // Events bubbling up the document may have been marked as prevented + // by a handler lower down the tree; reflect the correct value. + this.isDefaultPrevented = src.defaultPrevented || + src.defaultPrevented === undefined && + + // Support: Android <=2.3 only + src.returnValue === false ? + returnTrue : + returnFalse; + + // Create target properties + // Support: Safari <=6 - 7 only + // Target should not be a text node (#504, #13143) + this.target = ( src.target && src.target.nodeType === 3 ) ? 
+ src.target.parentNode : + src.target; + + this.currentTarget = src.currentTarget; + this.relatedTarget = src.relatedTarget; + + // Event type + } else { + this.type = src; + } + + // Put explicitly provided properties onto the event object + if ( props ) { + jQuery.extend( this, props ); + } + + // Create a timestamp if incoming event doesn't have one + this.timeStamp = src && src.timeStamp || Date.now(); + + // Mark it as fixed + this[ jQuery.expando ] = true; +}; + +// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding +// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html +jQuery.Event.prototype = { + constructor: jQuery.Event, + isDefaultPrevented: returnFalse, + isPropagationStopped: returnFalse, + isImmediatePropagationStopped: returnFalse, + isSimulated: false, + + preventDefault: function() { + var e = this.originalEvent; + + this.isDefaultPrevented = returnTrue; + + if ( e && !this.isSimulated ) { + e.preventDefault(); + } + }, + stopPropagation: function() { + var e = this.originalEvent; + + this.isPropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopPropagation(); + } + }, + stopImmediatePropagation: function() { + var e = this.originalEvent; + + this.isImmediatePropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopImmediatePropagation(); + } + + this.stopPropagation(); + } +}; + +// Includes all common event props including KeyEvent and MouseEvent specific props +jQuery.each( { + altKey: true, + bubbles: true, + cancelable: true, + changedTouches: true, + ctrlKey: true, + detail: true, + eventPhase: true, + metaKey: true, + pageX: true, + pageY: true, + shiftKey: true, + view: true, + "char": true, + code: true, + charCode: true, + key: true, + keyCode: true, + button: true, + buttons: true, + clientX: true, + clientY: true, + offsetX: true, + offsetY: true, + pointerId: true, + pointerType: true, + screenX: true, + screenY: true, + targetTouches: true, + toElement: true, + touches: true, + which: true +}, jQuery.event.addProp ); + +jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { + jQuery.event.special[ type ] = { + + // Utilize native event if possible so blur/focus sequence is correct + setup: function() { + + // Claim the first handler + // dataPriv.set( this, "focus", ... ) + // dataPriv.set( this, "blur", ... ) + leverageNative( this, type, expectSync ); + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function() { + + // Force setup before trigger + leverageNative( this, type ); + + // Return non-false to allow normal event-path propagation + return true; + }, + + // Suppress native focus or blur as it's already being fired + // in leverageNative. + _default: function() { + return true; + }, + + delegateType: delegateType + }; +} ); + +// Create mouseenter/leave events using mouseover/out and event-time checks +// so that event delegation works in jQuery. +// Do the same for pointerenter/pointerleave and pointerover/pointerout +// +// Support: Safari 7 only +// Safari sends mouseenter too often; see: +// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 +// for the description of the bug (it existed in older Chrome versions as well). 
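+// [Editor's note] Illustrative sketch, not part of the jQuery source: the
+// mapping defined below is what lets the non-bubbling mouseenter/mouseleave
+// pair be delegated, e.g.
+//
+//     jQuery( "#list" ).on( "mouseenter", "li", function() {
+//         jQuery( this ).addClass( "hover" ); // assumes a "hover" CSS class
+//     } );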
+jQuery.each( {
+	mouseenter: "mouseover",
+	mouseleave: "mouseout",
+	pointerenter: "pointerover",
+	pointerleave: "pointerout"
+}, function( orig, fix ) {
+	jQuery.event.special[ orig ] = {
+		delegateType: fix,
+		bindType: fix,
+
+		handle: function( event ) {
+			var ret,
+				target = this,
+				related = event.relatedTarget,
+				handleObj = event.handleObj;
+
+			// For mouseenter/leave call the handler if related is outside the target.
+			// NB: No relatedTarget if the mouse left/entered the browser window
+			if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) {
+				event.type = handleObj.origType;
+				ret = handleObj.handler.apply( this, arguments );
+				event.type = fix;
+			}
+			return ret;
+		}
+	};
+} );
+
+jQuery.fn.extend( {
+
+	on: function( types, selector, data, fn ) {
+		return on( this, types, selector, data, fn );
+	},
+	one: function( types, selector, data, fn ) {
+		return on( this, types, selector, data, fn, 1 );
+	},
+	off: function( types, selector, fn ) {
+		var handleObj, type;
+		if ( types && types.preventDefault && types.handleObj ) {
+
+			// ( event )  dispatched jQuery.Event
+			handleObj = types.handleObj;
+			jQuery( types.delegateTarget ).off(
+				handleObj.namespace ?
+					handleObj.origType + "." + handleObj.namespace :
+					handleObj.origType,
+				handleObj.selector,
+				handleObj.handler
+			);
+			return this;
+		}
+		if ( typeof types === "object" ) {
+
+			// ( types-object [, selector] )
+			for ( type in types ) {
+				this.off( type, selector, types[ type ] );
+			}
+			return this;
+		}
+		if ( selector === false || typeof selector === "function" ) {
+
+			// ( types [, fn] )
+			fn = selector;
+			selector = undefined;
+		}
+		if ( fn === false ) {
+			fn = returnFalse;
+		}
+		return this.each( function() {
+			jQuery.event.remove( this, types, fn, selector );
+		} );
+	}
+} );
+
+
+var
+
+	// Support: IE <=10 - 11, Edge 12 - 13 only
+	// In IE/Edge using regex groups here causes severe slowdowns.
+	// See https://connect.microsoft.com/IE/feedback/details/1736512/
+	rnoInnerhtml = /<script|<style|<link/i,
+
+	// checked="checked" or checked
+	rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i,
+
+	rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;
+
+// Prefer a tbody over its parent table for containing new rows
+function manipulationTarget( elem, content ) {
+	if ( nodeName( elem, "table" ) &&
+		nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) {
+
+		return jQuery( elem ).children( "tbody" )[ 0 ] || elem;
+	}
+
+	return elem;
+}
+
+// Replace/restore the type attribute of script elements for safe DOM manipulation
+function disableScript( elem ) {
+	elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type;
+	return elem;
+}
+function restoreScript( elem ) {
+	if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) {
+		elem.type = elem.type.slice( 5 );
+	} else {
+		elem.removeAttribute( "type" );
+	}
+
+	return elem;
+}
+
+function cloneCopyEvent( src, dest ) {
+	var i, l, type, pdataOld, udataOld, udataCur, events;
+
+	if ( dest.nodeType !== 1 ) {
+		return;
+	}
+
+	// 1. Copy private data: events, handlers, etc.
+	if ( dataPriv.hasData( src ) ) {
+		pdataOld = dataPriv.get( src );
+		events = pdataOld.events;
+
+		if ( events ) {
+			dataPriv.remove( dest, "handle events" );
+
+			for ( type in events ) {
+				for ( i = 0, l = events[ type ].length; i < l; i++ ) {
+					jQuery.event.add( dest, type, events[ type ][ i ] );
+				}
+			}
+		}
+	}
+
+	// 2.
Copy user data + if ( dataUser.hasData( src ) ) { + udataOld = dataUser.access( src ); + udataCur = jQuery.extend( {}, udataOld ); + + dataUser.set( dest, udataCur ); + } +} + +// Fix IE bugs, see support tests +function fixInput( src, dest ) { + var nodeName = dest.nodeName.toLowerCase(); + + // Fails to persist the checked state of a cloned checkbox or radio button. + if ( nodeName === "input" && rcheckableType.test( src.type ) ) { + dest.checked = src.checked; + + // Fails to return the selected option to the default selected state when cloning options + } else if ( nodeName === "input" || nodeName === "textarea" ) { + dest.defaultValue = src.defaultValue; + } +} + +function domManip( collection, args, callback, ignored ) { + + // Flatten any nested arrays + args = flat( args ); + + var fragment, first, scripts, hasScripts, node, doc, + i = 0, + l = collection.length, + iNoClone = l - 1, + value = args[ 0 ], + valueIsFunction = isFunction( value ); + + // We can't cloneNode fragments that contain checked, in WebKit + if ( valueIsFunction || + ( l > 1 && typeof value === "string" && + !support.checkClone && rchecked.test( value ) ) ) { + return collection.each( function( index ) { + var self = collection.eq( index ); + if ( valueIsFunction ) { + args[ 0 ] = value.call( this, index, self.html() ); + } + domManip( self, args, callback, ignored ); + } ); + } + + if ( l ) { + fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); + first = fragment.firstChild; + + if ( fragment.childNodes.length === 1 ) { + fragment = first; + } + + // Require either new content or an interest in ignored elements to invoke the callback + if ( first || ignored ) { + scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); + hasScripts = scripts.length; + + // Use the original fragment for the last item + // instead of the first because it can end up + // being emptied incorrectly in certain situations (#8070). + for ( ; i < l; i++ ) { + node = fragment; + + if ( i !== iNoClone ) { + node = jQuery.clone( node, true, true ); + + // Keep references to cloned scripts for later restoration + if ( hasScripts ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( scripts, getAll( node, "script" ) ); + } + } + + callback.call( collection[ i ], node, i ); + } + + if ( hasScripts ) { + doc = scripts[ scripts.length - 1 ].ownerDocument; + + // Reenable scripts + jQuery.map( scripts, restoreScript ); + + // Evaluate executable scripts on first document insertion + for ( i = 0; i < hasScripts; i++ ) { + node = scripts[ i ]; + if ( rscriptType.test( node.type || "" ) && + !dataPriv.access( node, "globalEval" ) && + jQuery.contains( doc, node ) ) { + + if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { + + // Optional AJAX dependency, but won't run scripts if not present + if ( jQuery._evalUrl && !node.noModule ) { + jQuery._evalUrl( node.src, { + nonce: node.nonce || node.getAttribute( "nonce" ) + }, doc ); + } + } else { + DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); + } + } + } + } + } + } + + return collection; +} + +function remove( elem, selector, keepData ) { + var node, + nodes = selector ? 
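+	// [Editor's note] Illustrative note, not part of the jQuery source:
+	// keepData is what separates .detach() from .remove(). A sketch
+	// ("fn" is a placeholder handler):
+	//
+	//     var li = jQuery( "li.first" ).on( "click", fn ).detach();
+	//     li.appendTo( "ul" ); // the click handler is still attached
+	//
+	// .remove() takes the same path with keepData unset, so cleanData
+	// strips the handler first.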
jQuery.filter( selector, elem ) : elem, + i = 0; + + for ( ; ( node = nodes[ i ] ) != null; i++ ) { + if ( !keepData && node.nodeType === 1 ) { + jQuery.cleanData( getAll( node ) ); + } + + if ( node.parentNode ) { + if ( keepData && isAttached( node ) ) { + setGlobalEval( getAll( node, "script" ) ); + } + node.parentNode.removeChild( node ); + } + } + + return elem; +} + +jQuery.extend( { + htmlPrefilter: function( html ) { + return html; + }, + + clone: function( elem, dataAndEvents, deepDataAndEvents ) { + var i, l, srcElements, destElements, + clone = elem.cloneNode( true ), + inPage = isAttached( elem ); + + // Fix IE cloning issues + if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && + !jQuery.isXMLDoc( elem ) ) { + + // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 + destElements = getAll( clone ); + srcElements = getAll( elem ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + fixInput( srcElements[ i ], destElements[ i ] ); + } + } + + // Copy the events from the original to the clone + if ( dataAndEvents ) { + if ( deepDataAndEvents ) { + srcElements = srcElements || getAll( elem ); + destElements = destElements || getAll( clone ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + cloneCopyEvent( srcElements[ i ], destElements[ i ] ); + } + } else { + cloneCopyEvent( elem, clone ); + } + } + + // Preserve script evaluation history + destElements = getAll( clone, "script" ); + if ( destElements.length > 0 ) { + setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); + } + + // Return the cloned set + return clone; + }, + + cleanData: function( elems ) { + var data, elem, type, + special = jQuery.event.special, + i = 0; + + for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { + if ( acceptData( elem ) ) { + if ( ( data = elem[ dataPriv.expando ] ) ) { + if ( data.events ) { + for ( type in data.events ) { + if ( special[ type ] ) { + jQuery.event.remove( elem, type ); + + // This is a shortcut to avoid jQuery.event.remove's overhead + } else { + jQuery.removeEvent( elem, type, data.handle ); + } + } + } + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataPriv.expando ] = undefined; + } + if ( elem[ dataUser.expando ] ) { + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataUser.expando ] = undefined; + } + } + } + } +} ); + +jQuery.fn.extend( { + detach: function( selector ) { + return remove( this, selector, true ); + }, + + remove: function( selector ) { + return remove( this, selector ); + }, + + text: function( value ) { + return access( this, function( value ) { + return value === undefined ? 
+ jQuery.text( this ) : + this.empty().each( function() { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + this.textContent = value; + } + } ); + }, null, value, arguments.length ); + }, + + append: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.appendChild( elem ); + } + } ); + }, + + prepend: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.insertBefore( elem, target.firstChild ); + } + } ); + }, + + before: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this ); + } + } ); + }, + + after: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this.nextSibling ); + } + } ); + }, + + empty: function() { + var elem, + i = 0; + + for ( ; ( elem = this[ i ] ) != null; i++ ) { + if ( elem.nodeType === 1 ) { + + // Prevent memory leaks + jQuery.cleanData( getAll( elem, false ) ); + + // Remove any remaining nodes + elem.textContent = ""; + } + } + + return this; + }, + + clone: function( dataAndEvents, deepDataAndEvents ) { + dataAndEvents = dataAndEvents == null ? false : dataAndEvents; + deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; + + return this.map( function() { + return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); + } ); + }, + + html: function( value ) { + return access( this, function( value ) { + var elem = this[ 0 ] || {}, + i = 0, + l = this.length; + + if ( value === undefined && elem.nodeType === 1 ) { + return elem.innerHTML; + } + + // See if we can take a shortcut and just use innerHTML + if ( typeof value === "string" && !rnoInnerhtml.test( value ) && + !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { + + value = jQuery.htmlPrefilter( value ); + + try { + for ( ; i < l; i++ ) { + elem = this[ i ] || {}; + + // Remove element nodes and prevent memory leaks + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + elem.innerHTML = value; + } + } + + elem = 0; + + // If using innerHTML throws an exception, use the fallback method + } catch ( e ) {} + } + + if ( elem ) { + this.empty().append( value ); + } + }, null, value, arguments.length ); + }, + + replaceWith: function() { + var ignored = []; + + // Make the changes, replacing each non-ignored context element with the new content + return domManip( this, arguments, function( elem ) { + var parent = this.parentNode; + + if ( jQuery.inArray( this, ignored ) < 0 ) { + jQuery.cleanData( getAll( this ) ); + if ( parent ) { + parent.replaceChild( elem, this ); + } + } + + // Force callback invocation + }, ignored ); + } +} ); + +jQuery.each( { + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after", + replaceAll: "replaceWith" +}, function( name, original ) { + jQuery.fn[ name ] = function( selector ) { + var elems, + ret = [], + insert = jQuery( selector ), + last = insert.length - 1, + i = 0; + + for ( ; i <= last; i++ ) { + elems = i === last ? 
this : this.clone( true ); + jQuery( insert[ i ] )[ original ]( elems ); + + // Support: Android <=4.0 only, PhantomJS 1 only + // .get() because push.apply(_, arraylike) throws on ancient WebKit + push.apply( ret, elems.get() ); + } + + return this.pushStack( ret ); + }; +} ); +var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); + +var getStyles = function( elem ) { + + // Support: IE <=11 only, Firefox <=30 (#15098, #14150) + // IE throws on elements created in popups + // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" + var view = elem.ownerDocument.defaultView; + + if ( !view || !view.opener ) { + view = window; + } + + return view.getComputedStyle( elem ); + }; + +var swap = function( elem, options, callback ) { + var ret, name, + old = {}; + + // Remember the old values, and insert the new ones + for ( name in options ) { + old[ name ] = elem.style[ name ]; + elem.style[ name ] = options[ name ]; + } + + ret = callback.call( elem ); + + // Revert the old values + for ( name in options ) { + elem.style[ name ] = old[ name ]; + } + + return ret; +}; + + +var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); + + + +( function() { + + // Executing both pixelPosition & boxSizingReliable tests require only one layout + // so they're executed at the same time to save the second computation. + function computeStyleTests() { + + // This is a singleton, we need to execute it only once + if ( !div ) { + return; + } + + container.style.cssText = "position:absolute;left:-11111px;width:60px;" + + "margin-top:1px;padding:0;border:0"; + div.style.cssText = + "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + + "margin:auto;border:1px;padding:1px;" + + "width:60%;top:1%"; + documentElement.appendChild( container ).appendChild( div ); + + var divStyle = window.getComputedStyle( div ); + pixelPositionVal = divStyle.top !== "1%"; + + // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 + reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; + + // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 + // Some styles come back with percentage values, even though they shouldn't + div.style.right = "60%"; + pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; + + // Support: IE 9 - 11 only + // Detect misreporting of content dimensions for box-sizing:border-box elements + boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; + + // Support: IE 9 only + // Detect overflow:scroll screwiness (gh-3699) + // Support: Chrome <=64 + // Don't get tricked when zoom affects offsetWidth (gh-4029) + div.style.position = "absolute"; + scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; + + documentElement.removeChild( container ); + + // Nullify the div so it wouldn't be stored in the memory and + // it will also be a sign that checks already performed + div = null; + } + + function roundPixelMeasures( measure ) { + return Math.round( parseFloat( measure ) ); + } + + var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, + reliableTrDimensionsVal, reliableMarginLeftVal, + container = document.createElement( "div" ), + div = document.createElement( "div" ); + + // Finish early in limited (non-browser) environments + if ( !div.style ) { + return; + } + + // Support: IE <=9 - 11 only + // Style of cloned element affects source element cloned (#8908) + div.style.backgroundClip = "content-box"; + div.cloneNode( true ).style.backgroundClip = ""; + 
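+	// [Editor's note] Illustrative note, not part of the jQuery source: the
+	// support getters registered below run computeStyleTests() lazily and
+	// only once; div is nulled afterwards as the "already ran" flag, e.g.
+	//
+	//     support.pixelPosition(); // first call: appends test divs, measures
+	//     support.pixelPosition(); // later calls: cached boolean, no layout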
support.clearCloneStyle = div.style.backgroundClip === "content-box"; + + jQuery.extend( support, { + boxSizingReliable: function() { + computeStyleTests(); + return boxSizingReliableVal; + }, + pixelBoxStyles: function() { + computeStyleTests(); + return pixelBoxStylesVal; + }, + pixelPosition: function() { + computeStyleTests(); + return pixelPositionVal; + }, + reliableMarginLeft: function() { + computeStyleTests(); + return reliableMarginLeftVal; + }, + scrollboxSize: function() { + computeStyleTests(); + return scrollboxSizeVal; + }, + + // Support: IE 9 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Behavior in IE 9 is more subtle than in newer versions & it passes + // some versions of this test; make sure not to make it pass there! + // + // Support: Firefox 70+ + // Only Firefox includes border widths + // in computed dimensions. (gh-4529) + reliableTrDimensions: function() { + var table, tr, trChild, trStyle; + if ( reliableTrDimensionsVal == null ) { + table = document.createElement( "table" ); + tr = document.createElement( "tr" ); + trChild = document.createElement( "div" ); + + table.style.cssText = "position:absolute;left:-11111px;border-collapse:separate"; + tr.style.cssText = "border:1px solid"; + + // Support: Chrome 86+ + // Height set through cssText does not get applied. + // Computed height then comes back as 0. + tr.style.height = "1px"; + trChild.style.height = "9px"; + + // Support: Android 8 Chrome 86+ + // In our bodyBackground.html iframe, + // display for all div elements is set to "inline", + // which causes a problem only in Android 8 Chrome 86. + // Ensuring the div is display: block + // gets around this issue. + trChild.style.display = "block"; + + documentElement + .appendChild( table ) + .appendChild( tr ) + .appendChild( trChild ); + + trStyle = window.getComputedStyle( tr ); + reliableTrDimensionsVal = ( parseInt( trStyle.height, 10 ) + + parseInt( trStyle.borderTopWidth, 10 ) + + parseInt( trStyle.borderBottomWidth, 10 ) ) === tr.offsetHeight; + + documentElement.removeChild( table ); + } + return reliableTrDimensionsVal; + } + } ); +} )(); + + +function curCSS( elem, name, computed ) { + var width, minWidth, maxWidth, ret, + + // Support: Firefox 51+ + // Retrieving style before computed somehow + // fixes an issue with getting wrong values + // on detached elements + style = elem.style; + + computed = computed || getStyles( elem ); + + // getPropertyValue is needed for: + // .css('filter') (IE 9 only, #12537) + // .css('--customProperty) (#3144) + if ( computed ) { + ret = computed.getPropertyValue( name ) || computed[ name ]; + + if ( ret === "" && !isAttached( elem ) ) { + ret = jQuery.style( elem, name ); + } + + // A tribute to the "awesome hack by Dean Edwards" + // Android Browser returns percentage for some values, + // but width seems to be reliably pixels. 
+ // This is against the CSSOM draft spec: + // https://drafts.csswg.org/cssom/#resolved-values + if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { + + // Remember the original values + width = style.width; + minWidth = style.minWidth; + maxWidth = style.maxWidth; + + // Put in the new values to get a computed value out + style.minWidth = style.maxWidth = style.width = ret; + ret = computed.width; + + // Revert the changed values + style.width = width; + style.minWidth = minWidth; + style.maxWidth = maxWidth; + } + } + + return ret !== undefined ? + + // Support: IE <=9 - 11 only + // IE returns zIndex value as an integer. + ret + "" : + ret; +} + + +function addGetHookIf( conditionFn, hookFn ) { + + // Define the hook, we'll check on the first run if it's really needed. + return { + get: function() { + if ( conditionFn() ) { + + // Hook not needed (or it's not possible to use it due + // to missing dependency), remove it. + delete this.get; + return; + } + + // Hook needed; redefine it so that the support test is not executed again. + return ( this.get = hookFn ).apply( this, arguments ); + } + }; +} + + +var cssPrefixes = [ "Webkit", "Moz", "ms" ], + emptyStyle = document.createElement( "div" ).style, + vendorProps = {}; + +// Return a vendor-prefixed property or undefined +function vendorPropName( name ) { + + // Check for vendor prefixed names + var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), + i = cssPrefixes.length; + + while ( i-- ) { + name = cssPrefixes[ i ] + capName; + if ( name in emptyStyle ) { + return name; + } + } +} + +// Return a potentially-mapped jQuery.cssProps or vendor prefixed property +function finalPropName( name ) { + var final = jQuery.cssProps[ name ] || vendorProps[ name ]; + + if ( final ) { + return final; + } + if ( name in emptyStyle ) { + return name; + } + return vendorProps[ name ] = vendorPropName( name ) || name; +} + + +var + + // Swappable if display is none or starts with table + // except "table", "table-cell", or "table-caption" + // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display + rdisplayswap = /^(none|table(?!-c[ea]).+)/, + rcustomProp = /^--/, + cssShow = { position: "absolute", visibility: "hidden", display: "block" }, + cssNormalTransform = { + letterSpacing: "0", + fontWeight: "400" + }; + +function setPositiveNumber( _elem, value, subtract ) { + + // Any relative (+/-) values have already been + // normalized at this point + var matches = rcssNum.exec( value ); + return matches ? + + // Guard against undefined "subtract", e.g., when used as in cssHooks + Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : + value; +} + +function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { + var i = dimension === "width" ? 1 : 0, + extra = 0, + delta = 0; + + // Adjustment may not be necessary + if ( box === ( isBorderBox ? 
"border" : "content" ) ) { + return 0; + } + + for ( ; i < 4; i += 2 ) { + + // Both box models exclude margin + if ( box === "margin" ) { + delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); + } + + // If we get here with a content-box, we're seeking "padding" or "border" or "margin" + if ( !isBorderBox ) { + + // Add padding + delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + + // For "border" or "margin", add border + if ( box !== "padding" ) { + delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + + // But still keep track of it otherwise + } else { + extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + + // If we get here with a border-box (content + padding + border), we're seeking "content" or + // "padding" or "margin" + } else { + + // For "content", subtract padding + if ( box === "content" ) { + delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + } + + // For "content" or "padding", subtract border + if ( box !== "margin" ) { + delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + } + } + + // Account for positive content-box scroll gutter when requested by providing computedVal + if ( !isBorderBox && computedVal >= 0 ) { + + // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border + // Assuming integer scroll gutter, subtract the rest and round down + delta += Math.max( 0, Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + computedVal - + delta - + extra - + 0.5 + + // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter + // Use an explicit zero to avoid NaN (gh-3964) + ) ) || 0; + } + + return delta; +} + +function getWidthOrHeight( elem, dimension, extra ) { + + // Start with computed style + var styles = getStyles( elem ), + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). + // Fake content-box until we know it's needed to know the true value. + boxSizingNeeded = !support.boxSizingReliable() || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + valueIsBorderBox = isBorderBox, + + val = curCSS( elem, dimension, styles ), + offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); + + // Support: Firefox <=54 + // Return a confounding non-pixel value or feign ignorance, as appropriate. + if ( rnumnonpx.test( val ) ) { + if ( !extra ) { + return val; + } + val = "auto"; + } + + + // Support: IE 9 - 11 only + // Use offsetWidth/offsetHeight for when box sizing is unreliable. + // In those cases, the computed value can be trusted to be border-box. + if ( ( !support.boxSizingReliable() && isBorderBox || + + // Support: IE 10 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
+ !support.reliableTrDimensions() && nodeName( elem, "tr" ) || + + // Fall back to offsetWidth/offsetHeight when value is "auto" + // This happens for inline elements with no explicit setting (gh-3571) + val === "auto" || + + // Support: Android <=4.1 - 4.3 only + // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) + !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && + + // Make sure the element is visible & connected + elem.getClientRects().length ) { + + isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; + + // Where available, offsetWidth/offsetHeight approximate border box dimensions. + // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the + // retrieved value as a content box dimension. + valueIsBorderBox = offsetProp in elem; + if ( valueIsBorderBox ) { + val = elem[ offsetProp ]; + } + } + + // Normalize "" and auto + val = parseFloat( val ) || 0; + + // Adjust for the element's box model + return ( val + + boxModelAdjustment( + elem, + dimension, + extra || ( isBorderBox ? "border" : "content" ), + valueIsBorderBox, + styles, + + // Provide the current computed size to request scroll gutter calculation (gh-3589) + val + ) + ) + "px"; +} + +jQuery.extend( { + + // Add in style property hooks for overriding the default + // behavior of getting and setting a style property + cssHooks: { + opacity: { + get: function( elem, computed ) { + if ( computed ) { + + // We should always get a number back from opacity + var ret = curCSS( elem, "opacity" ); + return ret === "" ? "1" : ret; + } + } + } + }, + + // Don't automatically add "px" to these possibly-unitless properties + cssNumber: { + "animationIterationCount": true, + "columnCount": true, + "fillOpacity": true, + "flexGrow": true, + "flexShrink": true, + "fontWeight": true, + "gridArea": true, + "gridColumn": true, + "gridColumnEnd": true, + "gridColumnStart": true, + "gridRow": true, + "gridRowEnd": true, + "gridRowStart": true, + "lineHeight": true, + "opacity": true, + "order": true, + "orphans": true, + "widows": true, + "zIndex": true, + "zoom": true + }, + + // Add in properties whose names you wish to fix before + // setting or getting the value + cssProps: {}, + + // Get and set the style property on a DOM Node + style: function( elem, name, value, extra ) { + + // Don't set styles on text and comment nodes + if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { + return; + } + + // Make sure that we're working with the right name + var ret, type, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ), + style = elem.style; + + // Make sure that we're working with the right name. We don't + // want to query the value if it is a CSS custom property + // since they are user-defined. 
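+		// [Editor's note] Illustrative sketch, not part of the jQuery source:
+		// skipping finalPropName() here is what lets CSS custom properties
+		// round-trip untouched, e.g. ("--brand-color" is a made-up name):
+		//
+		//     jQuery( ":root" ).css( "--brand-color", "#336699" );
+		//     jQuery( ":root" ).css( "--brand-color" );  // "#336699"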
+ if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Gets hook for the prefixed version, then unprefixed version + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // Check if we're setting a value + if ( value !== undefined ) { + type = typeof value; + + // Convert "+=" or "-=" to relative numbers (#7345) + if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { + value = adjustCSS( elem, name, ret ); + + // Fixes bug #9237 + type = "number"; + } + + // Make sure that null and NaN values aren't set (#7116) + if ( value == null || value !== value ) { + return; + } + + // If a number was passed in, add the unit (except for certain CSS properties) + // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append + // "px" to a few hardcoded values. + if ( type === "number" && !isCustomProp ) { + value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); + } + + // background-* props affect original clone's values + if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { + style[ name ] = "inherit"; + } + + // If a hook was provided, use that value, otherwise just set the specified value + if ( !hooks || !( "set" in hooks ) || + ( value = hooks.set( elem, value, extra ) ) !== undefined ) { + + if ( isCustomProp ) { + style.setProperty( name, value ); + } else { + style[ name ] = value; + } + } + + } else { + + // If a hook was provided get the non-computed value from there + if ( hooks && "get" in hooks && + ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { + + return ret; + } + + // Otherwise just get the value from the style object + return style[ name ]; + } + }, + + css: function( elem, name, extra, styles ) { + var val, num, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ); + + // Make sure that we're working with the right name. We don't + // want to modify the value if it is a CSS custom property + // since they are user-defined. + if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Try prefixed name followed by the unprefixed name + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // If a hook was provided get the computed value from there + if ( hooks && "get" in hooks ) { + val = hooks.get( elem, true, extra ); + } + + // Otherwise, if a way to get the computed value exists, use that + if ( val === undefined ) { + val = curCSS( elem, name, styles ); + } + + // Convert "normal" to computed value + if ( val === "normal" && name in cssNormalTransform ) { + val = cssNormalTransform[ name ]; + } + + // Make numeric if forced or a qualifier was provided and val looks numeric + if ( extra === "" || extra ) { + num = parseFloat( val ); + return extra === true || isFinite( num ) ? num || 0 : val; + } + + return val; + } +} ); + +jQuery.each( [ "height", "width" ], function( _i, dimension ) { + jQuery.cssHooks[ dimension ] = { + get: function( elem, computed, extra ) { + if ( computed ) { + + // Certain elements can have dimension info if we invisibly show them + // but it must have a current display style that would benefit + return rdisplayswap.test( jQuery.css( elem, "display" ) ) && + + // Support: Safari 8+ + // Table columns in Safari have non-zero offsetWidth & zero + // getBoundingClientRect().width unless display is changed. + // Support: IE <=11 only + // Running getBoundingClientRect on a disconnected node + // in IE throws an error. 
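+					// [Editor's note] Illustrative note, not part of the
+					// jQuery source: this swap() branch is why reading the
+					// width of a display:none element still yields a pixel
+					// value, e.g.
+					//
+					//     jQuery( "#panel" ).hide().css( "width" ); // e.g. "200px"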
+ ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? + swap( elem, cssShow, function() { + return getWidthOrHeight( elem, dimension, extra ); + } ) : + getWidthOrHeight( elem, dimension, extra ); + } + }, + + set: function( elem, value, extra ) { + var matches, + styles = getStyles( elem ), + + // Only read styles.position if the test has a chance to fail + // to avoid forcing a reflow. + scrollboxSizeBuggy = !support.scrollboxSize() && + styles.position === "absolute", + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) + boxSizingNeeded = scrollboxSizeBuggy || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + subtract = extra ? + boxModelAdjustment( + elem, + dimension, + extra, + isBorderBox, + styles + ) : + 0; + + // Account for unreliable border-box dimensions by comparing offset* to computed and + // faking a content-box to get border and padding (gh-3699) + if ( isBorderBox && scrollboxSizeBuggy ) { + subtract -= Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + parseFloat( styles[ dimension ] ) - + boxModelAdjustment( elem, dimension, "border", false, styles ) - + 0.5 + ); + } + + // Convert to pixels if value adjustment is needed + if ( subtract && ( matches = rcssNum.exec( value ) ) && + ( matches[ 3 ] || "px" ) !== "px" ) { + + elem.style[ dimension ] = value; + value = jQuery.css( elem, dimension ); + } + + return setPositiveNumber( elem, value, subtract ); + } + }; +} ); + +jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, + function( elem, computed ) { + if ( computed ) { + return ( parseFloat( curCSS( elem, "marginLeft" ) ) || + elem.getBoundingClientRect().left - + swap( elem, { marginLeft: 0 }, function() { + return elem.getBoundingClientRect().left; + } ) + ) + "px"; + } + } +); + +// These hooks are used by animate to expand properties +jQuery.each( { + margin: "", + padding: "", + border: "Width" +}, function( prefix, suffix ) { + jQuery.cssHooks[ prefix + suffix ] = { + expand: function( value ) { + var i = 0, + expanded = {}, + + // Assumes a single number if not a string + parts = typeof value === "string" ? value.split( " " ) : [ value ]; + + for ( ; i < 4; i++ ) { + expanded[ prefix + cssExpand[ i ] + suffix ] = + parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; + } + + return expanded; + } + }; + + if ( prefix !== "margin" ) { + jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; + } +} ); + +jQuery.fn.extend( { + css: function( name, value ) { + return access( this, function( elem, name, value ) { + var styles, len, + map = {}, + i = 0; + + if ( Array.isArray( name ) ) { + styles = getStyles( elem ); + len = name.length; + + for ( ; i < len; i++ ) { + map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); + } + + return map; + } + + return value !== undefined ? + jQuery.style( elem, name, value ) : + jQuery.css( elem, name ); + }, name, value, arguments.length > 1 ); + } +} ); + + +function Tween( elem, options, prop, end, easing ) { + return new Tween.prototype.init( elem, options, prop, end, easing ); +} +jQuery.Tween = Tween; + +Tween.prototype = { + constructor: Tween, + init: function( elem, options, prop, end, easing, unit ) { + this.elem = elem; + this.prop = prop; + this.easing = easing || jQuery.easing._default; + this.options = options; + this.start = this.now = this.cur(); + this.end = end; + this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); + }, + cur: function() { + var hooks = Tween.propHooks[ this.prop ]; + + return hooks && hooks.get ? + hooks.get( this ) : + Tween.propHooks._default.get( this ); + }, + run: function( percent ) { + var eased, + hooks = Tween.propHooks[ this.prop ]; + + if ( this.options.duration ) { + this.pos = eased = jQuery.easing[ this.easing ]( + percent, this.options.duration * percent, 0, 1, this.options.duration + ); + } else { + this.pos = eased = percent; + } + this.now = ( this.end - this.start ) * eased + this.start; + + if ( this.options.step ) { + this.options.step.call( this.elem, this.now, this ); + } + + if ( hooks && hooks.set ) { + hooks.set( this ); + } else { + Tween.propHooks._default.set( this ); + } + return this; + } +}; + +Tween.prototype.init.prototype = Tween.prototype; + +Tween.propHooks = { + _default: { + get: function( tween ) { + var result; + + // Use a property on the element directly when it is not a DOM element, + // or when there is no matching style property that exists. + if ( tween.elem.nodeType !== 1 || + tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { + return tween.elem[ tween.prop ]; + } + + // Passing an empty string as a 3rd parameter to .css will automatically + // attempt a parseFloat and fallback to a string if the parse fails. + // Simple values such as "10px" are parsed to Float; + // complex values such as "rotate(1rad)" are returned as-is. + result = jQuery.css( tween.elem, tween.prop, "" ); + + // Empty strings, null, undefined and "auto" are converted to 0. + return !result || result === "auto" ? 0 : result; + }, + set: function( tween ) { + + // Use step hook for back compat. + // Use cssHook if its there. + // Use .style if available and use plain properties where available. + if ( jQuery.fx.step[ tween.prop ] ) { + jQuery.fx.step[ tween.prop ]( tween ); + } else if ( tween.elem.nodeType === 1 && ( + jQuery.cssHooks[ tween.prop ] || + tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { + jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); + } else { + tween.elem[ tween.prop ] = tween.now; + } + } + } +}; + +// Support: IE <=9 only +// Panic based approach to setting things on disconnected nodes +Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { + set: function( tween ) { + if ( tween.elem.nodeType && tween.elem.parentNode ) { + tween.elem[ tween.prop ] = tween.now; + } + } +}; + +jQuery.easing = { + linear: function( p ) { + return p; + }, + swing: function( p ) { + return 0.5 - Math.cos( p * Math.PI ) / 2; + }, + _default: "swing" +}; + +jQuery.fx = Tween.prototype.init; + +// Back compat <1.8 extension point +jQuery.fx.step = {}; + + + + +var + fxNow, inProgress, + rfxtypes = /^(?:toggle|show|hide)$/, + rrun = /queueHooks$/; + +function schedule() { + if ( inProgress ) { + if ( document.hidden === false && window.requestAnimationFrame ) { + window.requestAnimationFrame( schedule ); + } else { + window.setTimeout( schedule, jQuery.fx.interval ); + } + + jQuery.fx.tick(); + } +} + +// Animations created synchronously will run synchronously +function createFxNow() { + window.setTimeout( function() { + fxNow = undefined; + } ); + return ( fxNow = Date.now() ); +} + +// Generate parameters to create a standard animation +function genFx( type, includeWidth ) { + var which, + i = 0, + attrs = { height: type }; + + // If we include width, step value is 1 to do all cssExpand values, + // otherwise step value is 2 to skip over Left and Right + includeWidth = includeWidth ? 
1 : 0; + for ( ; i < 4; i += 2 - includeWidth ) { + which = cssExpand[ i ]; + attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; + } + + if ( includeWidth ) { + attrs.opacity = attrs.width = type; + } + + return attrs; +} + +function createTween( value, prop, animation ) { + var tween, + collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), + index = 0, + length = collection.length; + for ( ; index < length; index++ ) { + if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { + + // We're done with this property + return tween; + } + } +} + +function defaultPrefilter( elem, props, opts ) { + var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, + isBox = "width" in props || "height" in props, + anim = this, + orig = {}, + style = elem.style, + hidden = elem.nodeType && isHiddenWithinTree( elem ), + dataShow = dataPriv.get( elem, "fxshow" ); + + // Queue-skipping animations hijack the fx hooks + if ( !opts.queue ) { + hooks = jQuery._queueHooks( elem, "fx" ); + if ( hooks.unqueued == null ) { + hooks.unqueued = 0; + oldfire = hooks.empty.fire; + hooks.empty.fire = function() { + if ( !hooks.unqueued ) { + oldfire(); + } + }; + } + hooks.unqueued++; + + anim.always( function() { + + // Ensure the complete handler is called before this completes + anim.always( function() { + hooks.unqueued--; + if ( !jQuery.queue( elem, "fx" ).length ) { + hooks.empty.fire(); + } + } ); + } ); + } + + // Detect show/hide animations + for ( prop in props ) { + value = props[ prop ]; + if ( rfxtypes.test( value ) ) { + delete props[ prop ]; + toggle = toggle || value === "toggle"; + if ( value === ( hidden ? "hide" : "show" ) ) { + + // Pretend to be hidden if this is a "show" and + // there is still data from a stopped show/hide + if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { + hidden = true; + + // Ignore all other no-op show/hide data + } else { + continue; + } + } + orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); + } + } + + // Bail out if this is a no-op like .hide().hide() + propTween = !jQuery.isEmptyObject( props ); + if ( !propTween && jQuery.isEmptyObject( orig ) ) { + return; + } + + // Restrict "overflow" and "display" styles during box animations + if ( isBox && elem.nodeType === 1 ) { + + // Support: IE <=9 - 11, Edge 12 - 15 + // Record all 3 overflow attributes because IE does not infer the shorthand + // from identically-valued overflowX and overflowY and Edge just mirrors + // the overflowX value there. 
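+		// [Editor's note] Illustrative note, not part of the jQuery source:
+		// recording and later restoring all three overflow values means a
+		// box animation such as
+		//
+		//     jQuery( "#panel" ).animate( { height: 0 }, 200 );
+		//
+		// can force overflow:hidden while tweening without clobbering the
+		// element's own overflow styles.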
+ opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; + + // Identify a display type, preferring old show/hide data over the CSS cascade + restoreDisplay = dataShow && dataShow.display; + if ( restoreDisplay == null ) { + restoreDisplay = dataPriv.get( elem, "display" ); + } + display = jQuery.css( elem, "display" ); + if ( display === "none" ) { + if ( restoreDisplay ) { + display = restoreDisplay; + } else { + + // Get nonempty value(s) by temporarily forcing visibility + showHide( [ elem ], true ); + restoreDisplay = elem.style.display || restoreDisplay; + display = jQuery.css( elem, "display" ); + showHide( [ elem ] ); + } + } + + // Animate inline elements as inline-block + if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { + if ( jQuery.css( elem, "float" ) === "none" ) { + + // Restore the original display value at the end of pure show/hide animations + if ( !propTween ) { + anim.done( function() { + style.display = restoreDisplay; + } ); + if ( restoreDisplay == null ) { + display = style.display; + restoreDisplay = display === "none" ? "" : display; + } + } + style.display = "inline-block"; + } + } + } + + if ( opts.overflow ) { + style.overflow = "hidden"; + anim.always( function() { + style.overflow = opts.overflow[ 0 ]; + style.overflowX = opts.overflow[ 1 ]; + style.overflowY = opts.overflow[ 2 ]; + } ); + } + + // Implement show/hide animations + propTween = false; + for ( prop in orig ) { + + // General show/hide setup for this element animation + if ( !propTween ) { + if ( dataShow ) { + if ( "hidden" in dataShow ) { + hidden = dataShow.hidden; + } + } else { + dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); + } + + // Store hidden/visible for toggle so `.stop().toggle()` "reverses" + if ( toggle ) { + dataShow.hidden = !hidden; + } + + // Show elements before animating them + if ( hidden ) { + showHide( [ elem ], true ); + } + + /* eslint-disable no-loop-func */ + + anim.done( function() { + + /* eslint-enable no-loop-func */ + + // The final step of a "hide" animation is actually hiding the element + if ( !hidden ) { + showHide( [ elem ] ); + } + dataPriv.remove( elem, "fxshow" ); + for ( prop in orig ) { + jQuery.style( elem, prop, orig[ prop ] ); + } + } ); + } + + // Per-property setup + propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); + if ( !( prop in dataShow ) ) { + dataShow[ prop ] = propTween.start; + if ( hidden ) { + propTween.end = propTween.start; + propTween.start = 0; + } + } + } +} + +function propFilter( props, specialEasing ) { + var index, name, easing, value, hooks; + + // camelCase, specialEasing and expand cssHook pass + for ( index in props ) { + name = camelCase( index ); + easing = specialEasing[ name ]; + value = props[ index ]; + if ( Array.isArray( value ) ) { + easing = value[ 1 ]; + value = props[ index ] = value[ 0 ]; + } + + if ( index !== name ) { + props[ name ] = value; + delete props[ index ]; + } + + hooks = jQuery.cssHooks[ name ]; + if ( hooks && "expand" in hooks ) { + value = hooks.expand( value ); + delete props[ name ]; + + // Not quite $.extend, this won't overwrite existing keys. 
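+			// [Editor's note] Illustrative note, not part of the jQuery
+			// source: this expand pass is what lets shorthands animate, e.g.
+			//
+			//     jQuery( "#box" ).animate( { padding: "10px 20px" }, 300 );
+			//
+			// becomes four paddingTop/Right/Bottom/Left tweens sharing the
+			// easing given for "padding".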
+ // Reusing 'index' because we have the correct "name" + for ( index in value ) { + if ( !( index in props ) ) { + props[ index ] = value[ index ]; + specialEasing[ index ] = easing; + } + } + } else { + specialEasing[ name ] = easing; + } + } +} + +function Animation( elem, properties, options ) { + var result, + stopped, + index = 0, + length = Animation.prefilters.length, + deferred = jQuery.Deferred().always( function() { + + // Don't match elem in the :animated selector + delete tick.elem; + } ), + tick = function() { + if ( stopped ) { + return false; + } + var currentTime = fxNow || createFxNow(), + remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), + + // Support: Android 2.3 only + // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) + temp = remaining / animation.duration || 0, + percent = 1 - temp, + index = 0, + length = animation.tweens.length; + + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( percent ); + } + + deferred.notifyWith( elem, [ animation, percent, remaining ] ); + + // If there's more to do, yield + if ( percent < 1 && length ) { + return remaining; + } + + // If this was an empty animation, synthesize a final progress notification + if ( !length ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + } + + // Resolve the animation and report its conclusion + deferred.resolveWith( elem, [ animation ] ); + return false; + }, + animation = deferred.promise( { + elem: elem, + props: jQuery.extend( {}, properties ), + opts: jQuery.extend( true, { + specialEasing: {}, + easing: jQuery.easing._default + }, options ), + originalProperties: properties, + originalOptions: options, + startTime: fxNow || createFxNow(), + duration: options.duration, + tweens: [], + createTween: function( prop, end ) { + var tween = jQuery.Tween( elem, animation.opts, prop, end, + animation.opts.specialEasing[ prop ] || animation.opts.easing ); + animation.tweens.push( tween ); + return tween; + }, + stop: function( gotoEnd ) { + var index = 0, + + // If we are going to the end, we want to run all the tweens + // otherwise we skip this part + length = gotoEnd ? 
animation.tweens.length : 0; + if ( stopped ) { + return this; + } + stopped = true; + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( 1 ); + } + + // Resolve when we played the last frame; otherwise, reject + if ( gotoEnd ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + deferred.resolveWith( elem, [ animation, gotoEnd ] ); + } else { + deferred.rejectWith( elem, [ animation, gotoEnd ] ); + } + return this; + } + } ), + props = animation.props; + + propFilter( props, animation.opts.specialEasing ); + + for ( ; index < length; index++ ) { + result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); + if ( result ) { + if ( isFunction( result.stop ) ) { + jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = + result.stop.bind( result ); + } + return result; + } + } + + jQuery.map( props, createTween, animation ); + + if ( isFunction( animation.opts.start ) ) { + animation.opts.start.call( elem, animation ); + } + + // Attach callbacks from options + animation + .progress( animation.opts.progress ) + .done( animation.opts.done, animation.opts.complete ) + .fail( animation.opts.fail ) + .always( animation.opts.always ); + + jQuery.fx.timer( + jQuery.extend( tick, { + elem: elem, + anim: animation, + queue: animation.opts.queue + } ) + ); + + return animation; +} + +jQuery.Animation = jQuery.extend( Animation, { + + tweeners: { + "*": [ function( prop, value ) { + var tween = this.createTween( prop, value ); + adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); + return tween; + } ] + }, + + tweener: function( props, callback ) { + if ( isFunction( props ) ) { + callback = props; + props = [ "*" ]; + } else { + props = props.match( rnothtmlwhite ); + } + + var prop, + index = 0, + length = props.length; + + for ( ; index < length; index++ ) { + prop = props[ index ]; + Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; + Animation.tweeners[ prop ].unshift( callback ); + } + }, + + prefilters: [ defaultPrefilter ], + + prefilter: function( callback, prepend ) { + if ( prepend ) { + Animation.prefilters.unshift( callback ); + } else { + Animation.prefilters.push( callback ); + } + } +} ); + +jQuery.speed = function( speed, easing, fn ) { + var opt = speed && typeof speed === "object" ? 
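+	// [Editor's note] Illustrative note, not part of the jQuery source:
+	// jQuery.speed() normalizes the flexible animate() signature, so
+	//
+	//     jQuery( "#box" ).animate( { opacity: 0 }, "slow", doneFn );
+	//
+	// and an options object { duration: 600, complete: doneFn } take the
+	// same path ("doneFn" is a placeholder callback).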
jQuery.extend( {}, speed ) : { + complete: fn || !fn && easing || + isFunction( speed ) && speed, + duration: speed, + easing: fn && easing || easing && !isFunction( easing ) && easing + }; + + // Go to the end state if fx are off + if ( jQuery.fx.off ) { + opt.duration = 0; + + } else { + if ( typeof opt.duration !== "number" ) { + if ( opt.duration in jQuery.fx.speeds ) { + opt.duration = jQuery.fx.speeds[ opt.duration ]; + + } else { + opt.duration = jQuery.fx.speeds._default; + } + } + } + + // Normalize opt.queue - true/undefined/null -> "fx" + if ( opt.queue == null || opt.queue === true ) { + opt.queue = "fx"; + } + + // Queueing + opt.old = opt.complete; + + opt.complete = function() { + if ( isFunction( opt.old ) ) { + opt.old.call( this ); + } + + if ( opt.queue ) { + jQuery.dequeue( this, opt.queue ); + } + }; + + return opt; +}; + +jQuery.fn.extend( { + fadeTo: function( speed, to, easing, callback ) { + + // Show any hidden elements after setting opacity to 0 + return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() + + // Animate to the value specified + .end().animate( { opacity: to }, speed, easing, callback ); + }, + animate: function( prop, speed, easing, callback ) { + var empty = jQuery.isEmptyObject( prop ), + optall = jQuery.speed( speed, easing, callback ), + doAnimation = function() { + + // Operate on a copy of prop so per-property easing won't be lost + var anim = Animation( this, jQuery.extend( {}, prop ), optall ); + + // Empty animations, or finishing resolves immediately + if ( empty || dataPriv.get( this, "finish" ) ) { + anim.stop( true ); + } + }; + + doAnimation.finish = doAnimation; + + return empty || optall.queue === false ? + this.each( doAnimation ) : + this.queue( optall.queue, doAnimation ); + }, + stop: function( type, clearQueue, gotoEnd ) { + var stopQueue = function( hooks ) { + var stop = hooks.stop; + delete hooks.stop; + stop( gotoEnd ); + }; + + if ( typeof type !== "string" ) { + gotoEnd = clearQueue; + clearQueue = type; + type = undefined; + } + if ( clearQueue ) { + this.queue( type || "fx", [] ); + } + + return this.each( function() { + var dequeue = true, + index = type != null && type + "queueHooks", + timers = jQuery.timers, + data = dataPriv.get( this ); + + if ( index ) { + if ( data[ index ] && data[ index ].stop ) { + stopQueue( data[ index ] ); + } + } else { + for ( index in data ) { + if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { + stopQueue( data[ index ] ); + } + } + } + + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && + ( type == null || timers[ index ].queue === type ) ) { + + timers[ index ].anim.stop( gotoEnd ); + dequeue = false; + timers.splice( index, 1 ); + } + } + + // Start the next in the queue if the last step wasn't forced. + // Timers currently will call their complete callbacks, which + // will dequeue but only if they were gotoEnd. + if ( dequeue || !gotoEnd ) { + jQuery.dequeue( this, type ); + } + } ); + }, + finish: function( type ) { + if ( type !== false ) { + type = type || "fx"; + } + return this.each( function() { + var index, + data = dataPriv.get( this ), + queue = data[ type + "queue" ], + hooks = data[ type + "queueHooks" ], + timers = jQuery.timers, + length = queue ? 
queue.length : 0; + + // Enable finishing flag on private data + data.finish = true; + + // Empty the queue first + jQuery.queue( this, type, [] ); + + if ( hooks && hooks.stop ) { + hooks.stop.call( this, true ); + } + + // Look for any active animations, and finish them + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && timers[ index ].queue === type ) { + timers[ index ].anim.stop( true ); + timers.splice( index, 1 ); + } + } + + // Look for any animations in the old queue and finish them + for ( index = 0; index < length; index++ ) { + if ( queue[ index ] && queue[ index ].finish ) { + queue[ index ].finish.call( this ); + } + } + + // Turn off finishing flag + delete data.finish; + } ); + } +} ); + +jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { + var cssFn = jQuery.fn[ name ]; + jQuery.fn[ name ] = function( speed, easing, callback ) { + return speed == null || typeof speed === "boolean" ? + cssFn.apply( this, arguments ) : + this.animate( genFx( name, true ), speed, easing, callback ); + }; +} ); + +// Generate shortcuts for custom animations +jQuery.each( { + slideDown: genFx( "show" ), + slideUp: genFx( "hide" ), + slideToggle: genFx( "toggle" ), + fadeIn: { opacity: "show" }, + fadeOut: { opacity: "hide" }, + fadeToggle: { opacity: "toggle" } +}, function( name, props ) { + jQuery.fn[ name ] = function( speed, easing, callback ) { + return this.animate( props, speed, easing, callback ); + }; +} ); + +jQuery.timers = []; +jQuery.fx.tick = function() { + var timer, + i = 0, + timers = jQuery.timers; + + fxNow = Date.now(); + + for ( ; i < timers.length; i++ ) { + timer = timers[ i ]; + + // Run the timer and safely remove it when done (allowing for external removal) + if ( !timer() && timers[ i ] === timer ) { + timers.splice( i--, 1 ); + } + } + + if ( !timers.length ) { + jQuery.fx.stop(); + } + fxNow = undefined; +}; + +jQuery.fx.timer = function( timer ) { + jQuery.timers.push( timer ); + jQuery.fx.start(); +}; + +jQuery.fx.interval = 13; +jQuery.fx.start = function() { + if ( inProgress ) { + return; + } + + inProgress = true; + schedule(); +}; + +jQuery.fx.stop = function() { + inProgress = null; +}; + +jQuery.fx.speeds = { + slow: 600, + fast: 200, + + // Default speed + _default: 400 +}; + + +// Based off of the plugin by Clint Helfers, with permission. +// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ +jQuery.fn.delay = function( time, type ) { + time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; + type = type || "fx"; + + return this.queue( type, function( next, hooks ) { + var timeout = window.setTimeout( next, time ); + hooks.stop = function() { + window.clearTimeout( timeout ); + }; + } ); +}; + + +( function() { + var input = document.createElement( "input" ), + select = document.createElement( "select" ), + opt = select.appendChild( document.createElement( "option" ) ); + + input.type = "checkbox"; + + // Support: Android <=4.3 only + // Default value for a checkbox should be "on" + support.checkOn = input.value !== ""; + + // Support: IE <=11 only + // Must access selectedIndex to make default options select + support.optSelected = opt.selected; + + // Support: IE <=11 only + // An input loses its value after becoming a radio + input = document.createElement( "input" ); + input.value = "t"; + input.type = "radio"; + support.radioValue = input.value === "t"; +} )(); + + +var boolHook, + attrHandle = jQuery.expr.attrHandle; + +jQuery.fn.extend( { + attr: function( name, value ) { + return access( this, jQuery.attr, name, value, arguments.length > 1 ); + }, + + removeAttr: function( name ) { + return this.each( function() { + jQuery.removeAttr( this, name ); + } ); + } +} ); + +jQuery.extend( { + attr: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set attributes on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + // Fallback to prop when attributes are not supported + if ( typeof elem.getAttribute === "undefined" ) { + return jQuery.prop( elem, name, value ); + } + + // Attribute hooks are determined by the lowercase version + // Grab necessary hook if one is defined + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + hooks = jQuery.attrHooks[ name.toLowerCase() ] || + ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); + } + + if ( value !== undefined ) { + if ( value === null ) { + jQuery.removeAttr( elem, name ); + return; + } + + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + elem.setAttribute( name, value + "" ); + return value; + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + ret = jQuery.find.attr( elem, name ); + + // Non-existent attributes return null, we normalize to undefined + return ret == null ? 
undefined : ret; + }, + + attrHooks: { + type: { + set: function( elem, value ) { + if ( !support.radioValue && value === "radio" && + nodeName( elem, "input" ) ) { + var val = elem.value; + elem.setAttribute( "type", value ); + if ( val ) { + elem.value = val; + } + return value; + } + } + } + }, + + removeAttr: function( elem, value ) { + var name, + i = 0, + + // Attribute names can contain non-HTML whitespace characters + // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 + attrNames = value && value.match( rnothtmlwhite ); + + if ( attrNames && elem.nodeType === 1 ) { + while ( ( name = attrNames[ i++ ] ) ) { + elem.removeAttribute( name ); + } + } + } +} ); + +// Hooks for boolean attributes +boolHook = { + set: function( elem, value, name ) { + if ( value === false ) { + + // Remove boolean attributes when set to false + jQuery.removeAttr( elem, name ); + } else { + elem.setAttribute( name, name ); + } + return name; + } +}; + +jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { + var getter = attrHandle[ name ] || jQuery.find.attr; + + attrHandle[ name ] = function( elem, name, isXML ) { + var ret, handle, + lowercaseName = name.toLowerCase(); + + if ( !isXML ) { + + // Avoid an infinite loop by temporarily removing this function from the getter + handle = attrHandle[ lowercaseName ]; + attrHandle[ lowercaseName ] = ret; + ret = getter( elem, name, isXML ) != null ? + lowercaseName : + null; + attrHandle[ lowercaseName ] = handle; + } + return ret; + }; +} ); + + + + +var rfocusable = /^(?:input|select|textarea|button)$/i, + rclickable = /^(?:a|area)$/i; + +jQuery.fn.extend( { + prop: function( name, value ) { + return access( this, jQuery.prop, name, value, arguments.length > 1 ); + }, + + removeProp: function( name ) { + return this.each( function() { + delete this[ jQuery.propFix[ name ] || name ]; + } ); + } +} ); + +jQuery.extend( { + prop: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set properties on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + + // Fix name and attach hooks + name = jQuery.propFix[ name ] || name; + hooks = jQuery.propHooks[ name ]; + } + + if ( value !== undefined ) { + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + return ( elem[ name ] = value ); + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + return elem[ name ]; + }, + + propHooks: { + tabIndex: { + get: function( elem ) { + + // Support: IE <=9 - 11 only + // elem.tabIndex doesn't always return the + // correct value when it hasn't been explicitly set + // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ + // Use proper attribute retrieval(#12072) + var tabindex = jQuery.find.attr( elem, "tabindex" ); + + if ( tabindex ) { + return parseInt( tabindex, 10 ); + } + + if ( + rfocusable.test( elem.nodeName ) || + rclickable.test( elem.nodeName ) && + elem.href + ) { + return 0; + } + + return -1; + } + } + }, + + propFix: { + "for": "htmlFor", + "class": "className" + } +} ); + +// Support: IE <=11 only +// Accessing the selectedIndex property +// forces the browser to respect setting selected +// on the option +// The getter ensures a default option is selected +// when in an 
optgroup +// eslint rule "no-unused-expressions" is disabled for this code +// since it considers such accessions noop +if ( !support.optSelected ) { + jQuery.propHooks.selected = { + get: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent && parent.parentNode ) { + parent.parentNode.selectedIndex; + } + return null; + }, + set: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent ) { + parent.selectedIndex; + + if ( parent.parentNode ) { + parent.parentNode.selectedIndex; + } + } + } + }; +} + +jQuery.each( [ + "tabIndex", + "readOnly", + "maxLength", + "cellSpacing", + "cellPadding", + "rowSpan", + "colSpan", + "useMap", + "frameBorder", + "contentEditable" +], function() { + jQuery.propFix[ this.toLowerCase() ] = this; +} ); + + + + + // Strip and collapse whitespace according to HTML spec + // https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace + function stripAndCollapse( value ) { + var tokens = value.match( rnothtmlwhite ) || []; + return tokens.join( " " ); + } + + +function getClass( elem ) { + return elem.getAttribute && elem.getAttribute( "class" ) || ""; +} + +function classesToArray( value ) { + if ( Array.isArray( value ) ) { + return value; + } + if ( typeof value === "string" ) { + return value.match( rnothtmlwhite ) || []; + } + return []; +} + +jQuery.fn.extend( { + addClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + classes = classesToArray( value ); + + if ( classes.length ) { + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + if ( cur.indexOf( " " + clazz + " " ) < 0 ) { + cur += clazz + " "; + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + removeClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + if ( !arguments.length ) { + return this.attr( "class", "" ); + } + + classes = classesToArray( value ); + + if ( classes.length ) { + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + + // This expression is here for better compressibility (see addClass) + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + + // Remove *all* instances + while ( cur.indexOf( " " + clazz + " " ) > -1 ) { + cur = cur.replace( " " + clazz + " ", " " ); + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + toggleClass: function( value, stateVal ) { + var type = typeof value, + isValidValue = type === "string" || Array.isArray( value ); + + if ( typeof stateVal === "boolean" && isValidValue ) { + return stateVal ? 
this.addClass( value ) : this.removeClass( value ); + } + + if ( isFunction( value ) ) { + return this.each( function( i ) { + jQuery( this ).toggleClass( + value.call( this, i, getClass( this ), stateVal ), + stateVal + ); + } ); + } + + return this.each( function() { + var className, i, self, classNames; + + if ( isValidValue ) { + + // Toggle individual class names + i = 0; + self = jQuery( this ); + classNames = classesToArray( value ); + + while ( ( className = classNames[ i++ ] ) ) { + + // Check each className given, space separated list + if ( self.hasClass( className ) ) { + self.removeClass( className ); + } else { + self.addClass( className ); + } + } + + // Toggle whole class name + } else if ( value === undefined || type === "boolean" ) { + className = getClass( this ); + if ( className ) { + + // Store className if set + dataPriv.set( this, "__className__", className ); + } + + // If the element has a class name or if we're passed `false`, + // then remove the whole classname (if there was one, the above saved it). + // Otherwise bring back whatever was previously saved (if anything), + // falling back to the empty string if nothing was stored. + if ( this.setAttribute ) { + this.setAttribute( "class", + className || value === false ? + "" : + dataPriv.get( this, "__className__" ) || "" + ); + } + } + } ); + }, + + hasClass: function( selector ) { + var className, elem, + i = 0; + + className = " " + selector + " "; + while ( ( elem = this[ i++ ] ) ) { + if ( elem.nodeType === 1 && + ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { + return true; + } + } + + return false; + } +} ); + + + + +var rreturn = /\r/g; + +jQuery.fn.extend( { + val: function( value ) { + var hooks, ret, valueIsFunction, + elem = this[ 0 ]; + + if ( !arguments.length ) { + if ( elem ) { + hooks = jQuery.valHooks[ elem.type ] || + jQuery.valHooks[ elem.nodeName.toLowerCase() ]; + + if ( hooks && + "get" in hooks && + ( ret = hooks.get( elem, "value" ) ) !== undefined + ) { + return ret; + } + + ret = elem.value; + + // Handle most common string cases + if ( typeof ret === "string" ) { + return ret.replace( rreturn, "" ); + } + + // Handle cases where value is null/undef or number + return ret == null ? "" : ret; + } + + return; + } + + valueIsFunction = isFunction( value ); + + return this.each( function( i ) { + var val; + + if ( this.nodeType !== 1 ) { + return; + } + + if ( valueIsFunction ) { + val = value.call( this, i, jQuery( this ).val() ); + } else { + val = value; + } + + // Treat null/undefined as ""; convert numbers to string + if ( val == null ) { + val = ""; + + } else if ( typeof val === "number" ) { + val += ""; + + } else if ( Array.isArray( val ) ) { + val = jQuery.map( val, function( value ) { + return value == null ? "" : value + ""; + } ); + } + + hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; + + // If set returns undefined, fall back to normal setting + if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { + this.value = val; + } + } ); + } +} ); + +jQuery.extend( { + valHooks: { + option: { + get: function( elem ) { + + var val = jQuery.find.attr( elem, "value" ); + return val != null ? 
+ val : + + // Support: IE <=10 - 11 only + // option.text throws exceptions (#14686, #14858) + // Strip and collapse whitespace + // https://html.spec.whatwg.org/#strip-and-collapse-whitespace + stripAndCollapse( jQuery.text( elem ) ); + } + }, + select: { + get: function( elem ) { + var value, option, i, + options = elem.options, + index = elem.selectedIndex, + one = elem.type === "select-one", + values = one ? null : [], + max = one ? index + 1 : options.length; + + if ( index < 0 ) { + i = max; + + } else { + i = one ? index : 0; + } + + // Loop through all the selected options + for ( ; i < max; i++ ) { + option = options[ i ]; + + // Support: IE <=9 only + // IE8-9 doesn't update selected after form reset (#2551) + if ( ( option.selected || i === index ) && + + // Don't return options that are disabled or in a disabled optgroup + !option.disabled && + ( !option.parentNode.disabled || + !nodeName( option.parentNode, "optgroup" ) ) ) { + + // Get the specific value for the option + value = jQuery( option ).val(); + + // We don't need an array for one selects + if ( one ) { + return value; + } + + // Multi-Selects return an array + values.push( value ); + } + } + + return values; + }, + + set: function( elem, value ) { + var optionSet, option, + options = elem.options, + values = jQuery.makeArray( value ), + i = options.length; + + while ( i-- ) { + option = options[ i ]; + + /* eslint-disable no-cond-assign */ + + if ( option.selected = + jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 + ) { + optionSet = true; + } + + /* eslint-enable no-cond-assign */ + } + + // Force browsers to behave consistently when non-matching value is set + if ( !optionSet ) { + elem.selectedIndex = -1; + } + return values; + } + } + } +} ); + +// Radios and checkboxes getter/setter +jQuery.each( [ "radio", "checkbox" ], function() { + jQuery.valHooks[ this ] = { + set: function( elem, value ) { + if ( Array.isArray( value ) ) { + return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); + } + } + }; + if ( !support.checkOn ) { + jQuery.valHooks[ this ].get = function( elem ) { + return elem.getAttribute( "value" ) === null ? "on" : elem.value; + }; + } +} ); + + + + +// Return jQuery for attributes-only inclusion + + +support.focusin = "onfocusin" in window; + + +var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, + stopPropagationCallback = function( e ) { + e.stopPropagation(); + }; + +jQuery.extend( jQuery.event, { + + trigger: function( event, data, elem, onlyHandlers ) { + + var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, + eventPath = [ elem || document ], + type = hasOwn.call( event, "type" ) ? event.type : event, + namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; + + cur = lastElement = tmp = elem = elem || document; + + // Don't do events on text and comment nodes + if ( elem.nodeType === 3 || elem.nodeType === 8 ) { + return; + } + + // focus/blur morphs to focusin/out; ensure we're not firing them right now + if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { + return; + } + + if ( type.indexOf( "." ) > -1 ) { + + // Namespaced trigger; create a regexp to match event type in handle() + namespaces = type.split( "." ); + type = namespaces.shift(); + namespaces.sort(); + } + ontype = type.indexOf( ":" ) < 0 && "on" + type; + + // Caller can pass in a jQuery.Event object, Object, or just an event type string + event = event[ jQuery.expando ] ? 
+ event : + new jQuery.Event( type, typeof event === "object" && event ); + + // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) + event.isTrigger = onlyHandlers ? 2 : 3; + event.namespace = namespaces.join( "." ); + event.rnamespace = event.namespace ? + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : + null; + + // Clean up the event in case it is being reused + event.result = undefined; + if ( !event.target ) { + event.target = elem; + } + + // Clone any incoming data and prepend the event, creating the handler arg list + data = data == null ? + [ event ] : + jQuery.makeArray( data, [ event ] ); + + // Allow special events to draw outside the lines + special = jQuery.event.special[ type ] || {}; + if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { + return; + } + + // Determine event propagation path in advance, per W3C events spec (#9951) + // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) + if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { + + bubbleType = special.delegateType || type; + if ( !rfocusMorph.test( bubbleType + type ) ) { + cur = cur.parentNode; + } + for ( ; cur; cur = cur.parentNode ) { + eventPath.push( cur ); + tmp = cur; + } + + // Only add window if we got to document (e.g., not plain obj or detached DOM) + if ( tmp === ( elem.ownerDocument || document ) ) { + eventPath.push( tmp.defaultView || tmp.parentWindow || window ); + } + } + + // Fire handlers on the event path + i = 0; + while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { + lastElement = cur; + event.type = i > 1 ? + bubbleType : + special.bindType || type; + + // jQuery handler + handle = ( dataPriv.get( cur, "events" ) || Object.create( null ) )[ event.type ] && + dataPriv.get( cur, "handle" ); + if ( handle ) { + handle.apply( cur, data ); + } + + // Native handler + handle = ontype && cur[ ontype ]; + if ( handle && handle.apply && acceptData( cur ) ) { + event.result = handle.apply( cur, data ); + if ( event.result === false ) { + event.preventDefault(); + } + } + } + event.type = type; + + // If nobody prevented the default action, do it now + if ( !onlyHandlers && !event.isDefaultPrevented() ) { + + if ( ( !special._default || + special._default.apply( eventPath.pop(), data ) === false ) && + acceptData( elem ) ) { + + // Call a native DOM method on the target with the same name as the event. 
+ // Don't do default actions on window, that's where global variables be (#6170) + if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { + + // Don't re-trigger an onFOO event when we call its FOO() method + tmp = elem[ ontype ]; + + if ( tmp ) { + elem[ ontype ] = null; + } + + // Prevent re-triggering of the same event, since we already bubbled it above + jQuery.event.triggered = type; + + if ( event.isPropagationStopped() ) { + lastElement.addEventListener( type, stopPropagationCallback ); + } + + elem[ type ](); + + if ( event.isPropagationStopped() ) { + lastElement.removeEventListener( type, stopPropagationCallback ); + } + + jQuery.event.triggered = undefined; + + if ( tmp ) { + elem[ ontype ] = tmp; + } + } + } + } + + return event.result; + }, + + // Piggyback on a donor event to simulate a different one + // Used only for `focus(in | out)` events + simulate: function( type, elem, event ) { + var e = jQuery.extend( + new jQuery.Event(), + event, + { + type: type, + isSimulated: true + } + ); + + jQuery.event.trigger( e, null, elem ); + } + +} ); + +jQuery.fn.extend( { + + trigger: function( type, data ) { + return this.each( function() { + jQuery.event.trigger( type, data, this ); + } ); + }, + triggerHandler: function( type, data ) { + var elem = this[ 0 ]; + if ( elem ) { + return jQuery.event.trigger( type, data, elem, true ); + } + } +} ); + + +// Support: Firefox <=44 +// Firefox doesn't have focus(in | out) events +// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 +// +// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 +// focus(in | out) events fire after focus & blur events, +// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order +// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 +if ( !support.focusin ) { + jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { + + // Attach a single capturing handler on the document while someone wants focusin/focusout + var handler = function( event ) { + jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); + }; + + jQuery.event.special[ fix ] = { + setup: function() { + + // Handle: regular nodes (via `this.ownerDocument`), window + // (via `this.document`) & document (via `this`). + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ); + + if ( !attaches ) { + doc.addEventListener( orig, handler, true ); + } + dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); + }, + teardown: function() { + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ) - 1; + + if ( !attaches ) { + doc.removeEventListener( orig, handler, true ); + dataPriv.remove( doc, fix ); + + } else { + dataPriv.access( doc, fix, attaches ); + } + } + }; + } ); +} +var location = window.location; + +var nonce = { guid: Date.now() }; + +var rquery = ( /\?/ ); + + + +// Cross-browser xml parsing +jQuery.parseXML = function( data ) { + var xml, parserErrorElem; + if ( !data || typeof data !== "string" ) { + return null; + } + + // Support: IE 9 - 11 only + // IE throws on parseFromString with invalid input. + try { + xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); + } catch ( e ) {} + + parserErrorElem = xml && xml.getElementsByTagName( "parsererror" )[ 0 ]; + if ( !xml || parserErrorElem ) { + jQuery.error( "Invalid XML: " + ( + parserErrorElem ? 
+ jQuery.map( parserErrorElem.childNodes, function( el ) { + return el.textContent; + } ).join( "\n" ) : + data + ) ); + } + return xml; +}; + + +var + rbracket = /\[\]$/, + rCRLF = /\r?\n/g, + rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, + rsubmittable = /^(?:input|select|textarea|keygen)/i; + +function buildParams( prefix, obj, traditional, add ) { + var name; + + if ( Array.isArray( obj ) ) { + + // Serialize array item. + jQuery.each( obj, function( i, v ) { + if ( traditional || rbracket.test( prefix ) ) { + + // Treat each array item as a scalar. + add( prefix, v ); + + } else { + + // Item is non-scalar (array or object), encode its numeric index. + buildParams( + prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", + v, + traditional, + add + ); + } + } ); + + } else if ( !traditional && toType( obj ) === "object" ) { + + // Serialize object item. + for ( name in obj ) { + buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); + } + + } else { + + // Serialize scalar item. + add( prefix, obj ); + } +} + +// Serialize an array of form elements or a set of +// key/values into a query string +jQuery.param = function( a, traditional ) { + var prefix, + s = [], + add = function( key, valueOrFunction ) { + + // If value is a function, invoke it and use its return value + var value = isFunction( valueOrFunction ) ? + valueOrFunction() : + valueOrFunction; + + s[ s.length ] = encodeURIComponent( key ) + "=" + + encodeURIComponent( value == null ? "" : value ); + }; + + if ( a == null ) { + return ""; + } + + // If an array was passed in, assume that it is an array of form elements. + if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { + + // Serialize the form elements + jQuery.each( a, function() { + add( this.name, this.value ); + } ); + + } else { + + // If traditional, encode the "old" way (the way 1.3.2 or older + // did it), otherwise encode params recursively. + for ( prefix in a ) { + buildParams( prefix, a[ prefix ], traditional, add ); + } + } + + // Return the resulting serialization + return s.join( "&" ); +}; + +jQuery.fn.extend( { + serialize: function() { + return jQuery.param( this.serializeArray() ); + }, + serializeArray: function() { + return this.map( function() { + + // Can add propHook for "elements" to filter or add form elements + var elements = jQuery.prop( this, "elements" ); + return elements ? 
jQuery.makeArray( elements ) : this; + } ).filter( function() { + var type = this.type; + + // Use .is( ":disabled" ) so that fieldset[disabled] works + return this.name && !jQuery( this ).is( ":disabled" ) && + rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && + ( this.checked || !rcheckableType.test( type ) ); + } ).map( function( _i, elem ) { + var val = jQuery( this ).val(); + + if ( val == null ) { + return null; + } + + if ( Array.isArray( val ) ) { + return jQuery.map( val, function( val ) { + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ); + } + + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ).get(); + } +} ); + + +var + r20 = /%20/g, + rhash = /#.*$/, + rantiCache = /([?&])_=[^&]*/, + rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, + + // #7653, #8125, #8152: local protocol detection + rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, + rnoContent = /^(?:GET|HEAD)$/, + rprotocol = /^\/\//, + + /* Prefilters + * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) + * 2) These are called: + * - BEFORE asking for a transport + * - AFTER param serialization (s.data is a string if s.processData is true) + * 3) key is the dataType + * 4) the catchall symbol "*" can be used + * 5) execution will start with transport dataType and THEN continue down to "*" if needed + */ + prefilters = {}, + + /* Transports bindings + * 1) key is the dataType + * 2) the catchall symbol "*" can be used + * 3) selection will start with transport dataType and THEN go to "*" if needed + */ + transports = {}, + + // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression + allTypes = "*/".concat( "*" ), + + // Anchor tag for parsing the document origin + originAnchor = document.createElement( "a" ); + +originAnchor.href = location.href; + +// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport +function addToPrefiltersOrTransports( structure ) { + + // dataTypeExpression is optional and defaults to "*" + return function( dataTypeExpression, func ) { + + if ( typeof dataTypeExpression !== "string" ) { + func = dataTypeExpression; + dataTypeExpression = "*"; + } + + var dataType, + i = 0, + dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; + + if ( isFunction( func ) ) { + + // For each dataType in the dataTypeExpression + while ( ( dataType = dataTypes[ i++ ] ) ) { + + // Prepend if requested + if ( dataType[ 0 ] === "+" ) { + dataType = dataType.slice( 1 ) || "*"; + ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); + + // Otherwise append + } else { + ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); + } + } + } + }; +} + +// Base inspection function for prefilters and transports +function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { + + var inspected = {}, + seekingTransport = ( structure === transports ); + + function inspect( dataType ) { + var selected; + inspected[ dataType ] = true; + jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { + var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); + if ( typeof dataTypeOrTransport === "string" && + !seekingTransport && !inspected[ dataTypeOrTransport ] ) { + + options.dataTypes.unshift( dataTypeOrTransport ); + inspect( dataTypeOrTransport ); + return false; + } else if ( seekingTransport ) { + return !( selected = dataTypeOrTransport ); + } + } 
); + return selected; + } + + return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); +} + +// A special extend for ajax options +// that takes "flat" options (not to be deep extended) +// Fixes #9887 +function ajaxExtend( target, src ) { + var key, deep, + flatOptions = jQuery.ajaxSettings.flatOptions || {}; + + for ( key in src ) { + if ( src[ key ] !== undefined ) { + ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; + } + } + if ( deep ) { + jQuery.extend( true, target, deep ); + } + + return target; +} + +/* Handles responses to an ajax request: + * - finds the right dataType (mediates between content-type and expected dataType) + * - returns the corresponding response + */ +function ajaxHandleResponses( s, jqXHR, responses ) { + + var ct, type, finalDataType, firstDataType, + contents = s.contents, + dataTypes = s.dataTypes; + + // Remove auto dataType and get content-type in the process + while ( dataTypes[ 0 ] === "*" ) { + dataTypes.shift(); + if ( ct === undefined ) { + ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); + } + } + + // Check if we're dealing with a known content-type + if ( ct ) { + for ( type in contents ) { + if ( contents[ type ] && contents[ type ].test( ct ) ) { + dataTypes.unshift( type ); + break; + } + } + } + + // Check to see if we have a response for the expected dataType + if ( dataTypes[ 0 ] in responses ) { + finalDataType = dataTypes[ 0 ]; + } else { + + // Try convertible dataTypes + for ( type in responses ) { + if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { + finalDataType = type; + break; + } + if ( !firstDataType ) { + firstDataType = type; + } + } + + // Or just use first one + finalDataType = finalDataType || firstDataType; + } + + // If we found a dataType + // We add the dataType to the list if needed + // and return the corresponding response + if ( finalDataType ) { + if ( finalDataType !== dataTypes[ 0 ] ) { + dataTypes.unshift( finalDataType ); + } + return responses[ finalDataType ]; + } +} + +/* Chain conversions given the request and the original response + * Also sets the responseXXX fields on the jqXHR instance + */ +function ajaxConvert( s, response, jqXHR, isSuccess ) { + var conv2, current, conv, tmp, prev, + converters = {}, + + // Work with a copy of dataTypes in case we need to modify it for conversion + dataTypes = s.dataTypes.slice(); + + // Create converters map with lowercased keys + if ( dataTypes[ 1 ] ) { + for ( conv in s.converters ) { + converters[ conv.toLowerCase() ] = s.converters[ conv ]; + } + } + + current = dataTypes.shift(); + + // Convert to each sequential dataType + while ( current ) { + + if ( s.responseFields[ current ] ) { + jqXHR[ s.responseFields[ current ] ] = response; + } + + // Apply the dataFilter if provided + if ( !prev && isSuccess && s.dataFilter ) { + response = s.dataFilter( response, s.dataType ); + } + + prev = current; + current = dataTypes.shift(); + + if ( current ) { + + // There's only work to do if current dataType is non-auto + if ( current === "*" ) { + + current = prev; + + // Convert response if prev dataType is non-auto and differs from current + } else if ( prev !== "*" && prev !== current ) { + + // Seek a direct converter + conv = converters[ prev + " " + current ] || converters[ "* " + current ]; + + // If none found, seek a pair + if ( !conv ) { + for ( conv2 in converters ) { + + // If conv2 outputs current + tmp = conv2.split( " " ); + if ( tmp[ 1 ] === current ) { + + // If prev 
can be converted to accepted input + conv = converters[ prev + " " + tmp[ 0 ] ] || + converters[ "* " + tmp[ 0 ] ]; + if ( conv ) { + + // Condense equivalence converters + if ( conv === true ) { + conv = converters[ conv2 ]; + + // Otherwise, insert the intermediate dataType + } else if ( converters[ conv2 ] !== true ) { + current = tmp[ 0 ]; + dataTypes.unshift( tmp[ 1 ] ); + } + break; + } + } + } + } + + // Apply converter (if not an equivalence) + if ( conv !== true ) { + + // Unless errors are allowed to bubble, catch and return them + if ( conv && s.throws ) { + response = conv( response ); + } else { + try { + response = conv( response ); + } catch ( e ) { + return { + state: "parsererror", + error: conv ? e : "No conversion from " + prev + " to " + current + }; + } + } + } + } + } + } + + return { state: "success", data: response }; +} + +jQuery.extend( { + + // Counter for holding the number of active queries + active: 0, + + // Last-Modified header cache for next request + lastModified: {}, + etag: {}, + + ajaxSettings: { + url: location.href, + type: "GET", + isLocal: rlocalProtocol.test( location.protocol ), + global: true, + processData: true, + async: true, + contentType: "application/x-www-form-urlencoded; charset=UTF-8", + + /* + timeout: 0, + data: null, + dataType: null, + username: null, + password: null, + cache: null, + throws: false, + traditional: false, + headers: {}, + */ + + accepts: { + "*": allTypes, + text: "text/plain", + html: "text/html", + xml: "application/xml, text/xml", + json: "application/json, text/javascript" + }, + + contents: { + xml: /\bxml\b/, + html: /\bhtml/, + json: /\bjson\b/ + }, + + responseFields: { + xml: "responseXML", + text: "responseText", + json: "responseJSON" + }, + + // Data converters + // Keys separate source (or catchall "*") and destination types with a single space + converters: { + + // Convert anything to text + "* text": String, + + // Text to html (true = no transformation) + "text html": true, + + // Evaluate text as a json expression + "text json": JSON.parse, + + // Parse text as xml + "text xml": jQuery.parseXML + }, + + // For options that shouldn't be deep extended: + // you can add your own custom options here if + // and when you create one that shouldn't be + // deep extended (see ajaxExtend) + flatOptions: { + url: true, + context: true + } + }, + + // Creates a full fledged settings object into target + // with both ajaxSettings and settings fields. + // If target is omitted, writes into ajaxSettings. + ajaxSetup: function( target, settings ) { + return settings ? 
+ + // Building a settings object + ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : + + // Extending ajaxSettings + ajaxExtend( jQuery.ajaxSettings, target ); + }, + + ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), + ajaxTransport: addToPrefiltersOrTransports( transports ), + + // Main method + ajax: function( url, options ) { + + // If url is an object, simulate pre-1.5 signature + if ( typeof url === "object" ) { + options = url; + url = undefined; + } + + // Force options to be an object + options = options || {}; + + var transport, + + // URL without anti-cache param + cacheURL, + + // Response headers + responseHeadersString, + responseHeaders, + + // timeout handle + timeoutTimer, + + // Url cleanup var + urlAnchor, + + // Request state (becomes false upon send and true upon completion) + completed, + + // To know if global events are to be dispatched + fireGlobals, + + // Loop variable + i, + + // uncached part of the url + uncached, + + // Create the final options object + s = jQuery.ajaxSetup( {}, options ), + + // Callbacks context + callbackContext = s.context || s, + + // Context for global events is callbackContext if it is a DOM node or jQuery collection + globalEventContext = s.context && + ( callbackContext.nodeType || callbackContext.jquery ) ? + jQuery( callbackContext ) : + jQuery.event, + + // Deferreds + deferred = jQuery.Deferred(), + completeDeferred = jQuery.Callbacks( "once memory" ), + + // Status-dependent callbacks + statusCode = s.statusCode || {}, + + // Headers (they are sent all at once) + requestHeaders = {}, + requestHeadersNames = {}, + + // Default abort message + strAbort = "canceled", + + // Fake xhr + jqXHR = { + readyState: 0, + + // Builds headers hashtable if needed + getResponseHeader: function( key ) { + var match; + if ( completed ) { + if ( !responseHeaders ) { + responseHeaders = {}; + while ( ( match = rheaders.exec( responseHeadersString ) ) ) { + responseHeaders[ match[ 1 ].toLowerCase() + " " ] = + ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) + .concat( match[ 2 ] ); + } + } + match = responseHeaders[ key.toLowerCase() + " " ]; + } + return match == null ? null : match.join( ", " ); + }, + + // Raw string + getAllResponseHeaders: function() { + return completed ? 
responseHeadersString : null; + }, + + // Caches the header + setRequestHeader: function( name, value ) { + if ( completed == null ) { + name = requestHeadersNames[ name.toLowerCase() ] = + requestHeadersNames[ name.toLowerCase() ] || name; + requestHeaders[ name ] = value; + } + return this; + }, + + // Overrides response content-type header + overrideMimeType: function( type ) { + if ( completed == null ) { + s.mimeType = type; + } + return this; + }, + + // Status-dependent callbacks + statusCode: function( map ) { + var code; + if ( map ) { + if ( completed ) { + + // Execute the appropriate callbacks + jqXHR.always( map[ jqXHR.status ] ); + } else { + + // Lazy-add the new callbacks in a way that preserves old ones + for ( code in map ) { + statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; + } + } + } + return this; + }, + + // Cancel the request + abort: function( statusText ) { + var finalText = statusText || strAbort; + if ( transport ) { + transport.abort( finalText ); + } + done( 0, finalText ); + return this; + } + }; + + // Attach deferreds + deferred.promise( jqXHR ); + + // Add protocol if not provided (prefilters might expect it) + // Handle falsy url in the settings object (#10093: consistency with old signature) + // We also use the url parameter if available + s.url = ( ( url || s.url || location.href ) + "" ) + .replace( rprotocol, location.protocol + "//" ); + + // Alias method option to type as per ticket #12004 + s.type = options.method || options.type || s.method || s.type; + + // Extract dataTypes list + s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; + + // A cross-domain request is in order when the origin doesn't match the current origin. + if ( s.crossDomain == null ) { + urlAnchor = document.createElement( "a" ); + + // Support: IE <=8 - 11, Edge 12 - 15 + // IE throws exception on accessing the href property if url is malformed, + // e.g. 
http://example.com:80x/ + try { + urlAnchor.href = s.url; + + // Support: IE <=8 - 11 only + // Anchor's host property isn't correctly set when s.url is relative + urlAnchor.href = urlAnchor.href; + s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== + urlAnchor.protocol + "//" + urlAnchor.host; + } catch ( e ) { + + // If there is an error parsing the URL, assume it is crossDomain, + // it can be rejected by the transport if it is invalid + s.crossDomain = true; + } + } + + // Convert data if not already a string + if ( s.data && s.processData && typeof s.data !== "string" ) { + s.data = jQuery.param( s.data, s.traditional ); + } + + // Apply prefilters + inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); + + // If request was aborted inside a prefilter, stop there + if ( completed ) { + return jqXHR; + } + + // We can fire global events as of now if asked to + // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) + fireGlobals = jQuery.event && s.global; + + // Watch for a new set of requests + if ( fireGlobals && jQuery.active++ === 0 ) { + jQuery.event.trigger( "ajaxStart" ); + } + + // Uppercase the type + s.type = s.type.toUpperCase(); + + // Determine if request has content + s.hasContent = !rnoContent.test( s.type ); + + // Save the URL in case we're toying with the If-Modified-Since + // and/or If-None-Match header later on + // Remove hash to simplify url manipulation + cacheURL = s.url.replace( rhash, "" ); + + // More options handling for requests with no content + if ( !s.hasContent ) { + + // Remember the hash so we can put it back + uncached = s.url.slice( cacheURL.length ); + + // If data is available and should be processed, append data to url + if ( s.data && ( s.processData || typeof s.data === "string" ) ) { + cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; + + // #9682: remove data so that it's not used in an eventual retry + delete s.data; + } + + // Add or update anti-cache param if needed + if ( s.cache === false ) { + cacheURL = cacheURL.replace( rantiCache, "$1" ); + uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + + uncached; + } + + // Put hash and anti-cache on the URL that will be requested (gh-1732) + s.url = cacheURL + uncached; + + // Change '%20' to '+' if this is encoded form body content (gh-2658) + } else if ( s.data && s.processData && + ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { + s.data = s.data.replace( r20, "+" ); + } + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. + if ( s.ifModified ) { + if ( jQuery.lastModified[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); + } + if ( jQuery.etag[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); + } + } + + // Set the correct header, if data is being sent + if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { + jqXHR.setRequestHeader( "Content-Type", s.contentType ); + } + + // Set the Accepts header for the server, depending on the dataType + jqXHR.setRequestHeader( + "Accept", + s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? + s.accepts[ s.dataTypes[ 0 ] ] + + ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : + s.accepts[ "*" ] + ); + + // Check for headers option + for ( i in s.headers ) { + jqXHR.setRequestHeader( i, s.headers[ i ] ); + } + + // Allow custom headers/mimetypes and early abort + if ( s.beforeSend && + ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { + + // Abort if not done already and return + return jqXHR.abort(); + } + + // Aborting is no longer a cancellation + strAbort = "abort"; + + // Install callbacks on deferreds + completeDeferred.add( s.complete ); + jqXHR.done( s.success ); + jqXHR.fail( s.error ); + + // Get transport + transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); + + // If no transport, we auto-abort + if ( !transport ) { + done( -1, "No Transport" ); + } else { + jqXHR.readyState = 1; + + // Send global event + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); + } + + // If request was aborted inside ajaxSend, stop there + if ( completed ) { + return jqXHR; + } + + // Timeout + if ( s.async && s.timeout > 0 ) { + timeoutTimer = window.setTimeout( function() { + jqXHR.abort( "timeout" ); + }, s.timeout ); + } + + try { + completed = false; + transport.send( requestHeaders, done ); + } catch ( e ) { + + // Rethrow post-completion exceptions + if ( completed ) { + throw e; + } + + // Propagate others as results + done( -1, e ); + } + } + + // Callback for when everything is done + function done( status, nativeStatusText, responses, headers ) { + var isSuccess, success, error, response, modified, + statusText = nativeStatusText; + + // Ignore repeat invocations + if ( completed ) { + return; + } + + completed = true; + + // Clear timeout if it exists + if ( timeoutTimer ) { + window.clearTimeout( timeoutTimer ); + } + + // Dereference transport for early garbage collection + // (no matter how long the jqXHR object will be used) + transport = undefined; + + // Cache response headers + responseHeadersString = headers || ""; + + // Set readyState + jqXHR.readyState = status > 0 ? 4 : 0; + + // Determine if successful + isSuccess = status >= 200 && status < 300 || status === 304; + + // Get response data + if ( responses ) { + response = ajaxHandleResponses( s, jqXHR, responses ); + } + + // Use a noop converter for missing script but not if jsonp + if ( !isSuccess && + jQuery.inArray( "script", s.dataTypes ) > -1 && + jQuery.inArray( "json", s.dataTypes ) < 0 ) { + s.converters[ "text script" ] = function() {}; + } + + // Convert no matter what (that way responseXXX fields are always set) + response = ajaxConvert( s, response, jqXHR, isSuccess ); + + // If successful, handle type chaining + if ( isSuccess ) { + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
+ if ( s.ifModified ) { + modified = jqXHR.getResponseHeader( "Last-Modified" ); + if ( modified ) { + jQuery.lastModified[ cacheURL ] = modified; + } + modified = jqXHR.getResponseHeader( "etag" ); + if ( modified ) { + jQuery.etag[ cacheURL ] = modified; + } + } + + // if no content + if ( status === 204 || s.type === "HEAD" ) { + statusText = "nocontent"; + + // if not modified + } else if ( status === 304 ) { + statusText = "notmodified"; + + // If we have data, let's convert it + } else { + statusText = response.state; + success = response.data; + error = response.error; + isSuccess = !error; + } + } else { + + // Extract error from statusText and normalize for non-aborts + error = statusText; + if ( status || !statusText ) { + statusText = "error"; + if ( status < 0 ) { + status = 0; + } + } + } + + // Set data for the fake xhr object + jqXHR.status = status; + jqXHR.statusText = ( nativeStatusText || statusText ) + ""; + + // Success/Error + if ( isSuccess ) { + deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); + } else { + deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); + } + + // Status-dependent callbacks + jqXHR.statusCode( statusCode ); + statusCode = undefined; + + if ( fireGlobals ) { + globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", + [ jqXHR, s, isSuccess ? success : error ] ); + } + + // Complete + completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); + + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); + + // Handle the global AJAX counter + if ( !( --jQuery.active ) ) { + jQuery.event.trigger( "ajaxStop" ); + } + } + } + + return jqXHR; + }, + + getJSON: function( url, data, callback ) { + return jQuery.get( url, data, callback, "json" ); + }, + + getScript: function( url, callback ) { + return jQuery.get( url, undefined, callback, "script" ); + } +} ); + +jQuery.each( [ "get", "post" ], function( _i, method ) { + jQuery[ method ] = function( url, data, callback, type ) { + + // Shift arguments if data argument was omitted + if ( isFunction( data ) ) { + type = type || callback; + callback = data; + data = undefined; + } + + // The url can be an options object (which then must have .url) + return jQuery.ajax( jQuery.extend( { + url: url, + type: method, + dataType: type, + data: data, + success: callback + }, jQuery.isPlainObject( url ) && url ) ); + }; +} ); + +jQuery.ajaxPrefilter( function( s ) { + var i; + for ( i in s.headers ) { + if ( i.toLowerCase() === "content-type" ) { + s.contentType = s.headers[ i ] || ""; + } + } +} ); + + +jQuery._evalUrl = function( url, options, doc ) { + return jQuery.ajax( { + url: url, + + // Make this explicit, since user can override this through ajaxSetup (#11264) + type: "GET", + dataType: "script", + cache: true, + async: false, + global: false, + + // Only evaluate the response if it is successful (gh-4126) + // dataFilter is not invoked for failure responses, so using it instead + // of the default converter is kludgy but it works. 
+ converters: { + "text script": function() {} + }, + dataFilter: function( response ) { + jQuery.globalEval( response, options, doc ); + } + } ); +}; + + +jQuery.fn.extend( { + wrapAll: function( html ) { + var wrap; + + if ( this[ 0 ] ) { + if ( isFunction( html ) ) { + html = html.call( this[ 0 ] ); + } + + // The elements to wrap the target around + wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); + + if ( this[ 0 ].parentNode ) { + wrap.insertBefore( this[ 0 ] ); + } + + wrap.map( function() { + var elem = this; + + while ( elem.firstElementChild ) { + elem = elem.firstElementChild; + } + + return elem; + } ).append( this ); + } + + return this; + }, + + wrapInner: function( html ) { + if ( isFunction( html ) ) { + return this.each( function( i ) { + jQuery( this ).wrapInner( html.call( this, i ) ); + } ); + } + + return this.each( function() { + var self = jQuery( this ), + contents = self.contents(); + + if ( contents.length ) { + contents.wrapAll( html ); + + } else { + self.append( html ); + } + } ); + }, + + wrap: function( html ) { + var htmlIsFunction = isFunction( html ); + + return this.each( function( i ) { + jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html ); + } ); + }, + + unwrap: function( selector ) { + this.parent( selector ).not( "body" ).each( function() { + jQuery( this ).replaceWith( this.childNodes ); + } ); + return this; + } +} ); + + +jQuery.expr.pseudos.hidden = function( elem ) { + return !jQuery.expr.pseudos.visible( elem ); +}; +jQuery.expr.pseudos.visible = function( elem ) { + return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); +}; + + + + +jQuery.ajaxSettings.xhr = function() { + try { + return new window.XMLHttpRequest(); + } catch ( e ) {} +}; + +var xhrSuccessStatus = { + + // File protocol always yields status code 0, assume 200 + 0: 200, + + // Support: IE <=9 only + // #1450: sometimes IE returns 1223 when it should be 204 + 1223: 204 + }, + xhrSupported = jQuery.ajaxSettings.xhr(); + +support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); +support.ajax = xhrSupported = !!xhrSupported; + +jQuery.ajaxTransport( function( options ) { + var callback, errorCallback; + + // Cross domain only allowed if supported through XMLHttpRequest + if ( support.cors || xhrSupported && !options.crossDomain ) { + return { + send: function( headers, complete ) { + var i, + xhr = options.xhr(); + + xhr.open( + options.type, + options.url, + options.async, + options.username, + options.password + ); + + // Apply custom fields if provided + if ( options.xhrFields ) { + for ( i in options.xhrFields ) { + xhr[ i ] = options.xhrFields[ i ]; + } + } + + // Override mime type if needed + if ( options.mimeType && xhr.overrideMimeType ) { + xhr.overrideMimeType( options.mimeType ); + } + + // X-Requested-With header + // For cross-domain requests, seeing as conditions for a preflight are + // akin to a jigsaw puzzle, we simply never set it to be sure. + // (it can always be set on a per-request basis or even using ajaxSetup) + // For same-domain requests, won't change header if already provided. 
+ if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { + headers[ "X-Requested-With" ] = "XMLHttpRequest"; + } + + // Set headers + for ( i in headers ) { + xhr.setRequestHeader( i, headers[ i ] ); + } + + // Callback + callback = function( type ) { + return function() { + if ( callback ) { + callback = errorCallback = xhr.onload = + xhr.onerror = xhr.onabort = xhr.ontimeout = + xhr.onreadystatechange = null; + + if ( type === "abort" ) { + xhr.abort(); + } else if ( type === "error" ) { + + // Support: IE <=9 only + // On a manual native abort, IE9 throws + // errors on any property access that is not readyState + if ( typeof xhr.status !== "number" ) { + complete( 0, "error" ); + } else { + complete( + + // File: protocol always yields status 0; see #8605, #14207 + xhr.status, + xhr.statusText + ); + } + } else { + complete( + xhrSuccessStatus[ xhr.status ] || xhr.status, + xhr.statusText, + + // Support: IE <=9 only + // IE9 has no XHR2 but throws on binary (trac-11426) + // For XHR2 non-text, let the caller handle it (gh-2498) + ( xhr.responseType || "text" ) !== "text" || + typeof xhr.responseText !== "string" ? + { binary: xhr.response } : + { text: xhr.responseText }, + xhr.getAllResponseHeaders() + ); + } + } + }; + }; + + // Listen to events + xhr.onload = callback(); + errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); + + // Support: IE 9 only + // Use onreadystatechange to replace onabort + // to handle uncaught aborts + if ( xhr.onabort !== undefined ) { + xhr.onabort = errorCallback; + } else { + xhr.onreadystatechange = function() { + + // Check readyState before timeout as it changes + if ( xhr.readyState === 4 ) { + + // Allow onerror to be called first, + // but that will not handle a native abort + // Also, save errorCallback to a variable + // as xhr.onerror cannot be accessed + window.setTimeout( function() { + if ( callback ) { + errorCallback(); + } + } ); + } + }; + } + + // Create the abort callback + callback = callback( "abort" ); + + try { + + // Do send the request (this may raise an exception) + xhr.send( options.hasContent && options.data || null ); + } catch ( e ) { + + // #14683: Only rethrow if this hasn't been notified as an error yet + if ( callback ) { + throw e; + } + } + }, + + abort: function() { + if ( callback ) { + callback(); + } + } + }; + } +} ); + + + + +// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) +jQuery.ajaxPrefilter( function( s ) { + if ( s.crossDomain ) { + s.contents.script = false; + } +} ); + +// Install script dataType +jQuery.ajaxSetup( { + accepts: { + script: "text/javascript, application/javascript, " + + "application/ecmascript, application/x-ecmascript" + }, + contents: { + script: /\b(?:java|ecma)script\b/ + }, + converters: { + "text script": function( text ) { + jQuery.globalEval( text ); + return text; + } + } +} ); + +// Handle cache's special case and crossDomain +jQuery.ajaxPrefilter( "script", function( s ) { + if ( s.cache === undefined ) { + s.cache = false; + } + if ( s.crossDomain ) { + s.type = "GET"; + } +} ); + +// Bind script tag hack transport +jQuery.ajaxTransport( "script", function( s ) { + + // This transport only deals with cross domain or forced-by-attrs requests + if ( s.crossDomain || s.scriptAttrs ) { + var script, callback; + return { + send: function( _, complete ) { + script = jQuery( " + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Follow the code style

+

We use the following tools to make the code style to be as consistent as possible:

+
+
    +
  • black, to format the code

  • +
  • flake8, to check the style and quality of the code

  • +
  • isort, to sort imports

  • +
+
+

The following versions of the above tools are used:

+
+
    +
  • black == 12.6b0

  • +
  • flake8 == 3.9.2

  • +
  • isort == 5.9.2

  • +
+
+

After running the following commands:

+
+
$ git clone https://github.com/k2-fsa/icefall
+$ cd icefall
+$ pip install pre-commit
+$ pre-commit install
+
+
+
+

it will run the following checks whenever you run git commit, automatically:

+
+
+../_images/pre-commit-check.png +
+

Fig. 7 pre-commit hooks invoked by git commit (Failed).

+
+
+
+

If any of the above checks failed, your git commit was not successful. +Please fix any issues reported by the check tools.

+
+

Hint

+

Some of the check tools, i.e., black and isort will modify +the files to be commited in-place. So please run git status +after failure to see which file has been modified by the tools +before you make any further changes.

+
+

After fixing all the failures, run git commit again and +it should succeed this time:

+
+
+../_images/pre-commit-check-success.png +
+

Fig. 8 pre-commit hooks invoked by git commit (Succeeded).

+
+
+
+

If you want to check the style of your code before git commit, you +can do the following:

+
+
$ cd icefall
+$ pip install black==21.6b0 flake8==3.9.2 isort==5.9.2
+$ black --check your_changed_file.py
+$ black your_changed_file.py  # modify it in-place
+$
+$ flake8 your_changed_file.py
+$
+$ isort --check your_changed_file.py  # modify it in-place
+$ isort your_changed_file.py
+
+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/contributing/doc.html b/contributing/doc.html new file mode 100644 index 000000000..a70c4ab8e --- /dev/null +++ b/contributing/doc.html @@ -0,0 +1,156 @@ + + + + + + + Contributing to Documentation — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Contributing to Documentation

+

We use sphinx +for documentation.

+

Before writing documentation, you have to prepare the environment:

+
+
$ cd docs
+$ pip install -r requirements.txt
+
+
+
+

After setting up the environment, you are ready to write documentation. +Please refer to reStructuredText Primer +if you are not familiar with reStructuredText.

+

After writing some documentation, you can build the documentation locally +to preview what it looks like if it is published:

+
+
$ cd docs
+$ make html
+
+
+
+

The generated documentation is in docs/build/html and can be viewed +with the following commands:

+
+
$ cd docs/build/html
+$ python3 -m http.server
+
+
+
+

It will print:

+
Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
+
+
+

Open your browser, go to http://0.0.0.0:8000/, and you will see +the following:

+
+
+../_images/doc-contrib.png +
+

Fig. 6 View generated documentation locally with python3 -m http.server.

+
+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/contributing/how-to-create-a-recipe.html b/contributing/how-to-create-a-recipe.html new file mode 100644 index 000000000..9b03ee2e3 --- /dev/null +++ b/contributing/how-to-create-a-recipe.html @@ -0,0 +1,258 @@ + + + + + + + How to create a recipe — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

How to create a recipe

+
+

Hint

+

Please read Follow the code style to adjust your code sytle.

+
+
+

Caution

+

icefall is designed to be as Pythonic as possible. Please use +Python in your recipe if possible.

+
+
+

Data Preparation

+

We recommend you to prepare your training/test/validate dataset +with lhotse.

+

Please refer to https://lhotse.readthedocs.io/en/latest/index.html +for how to create a recipe in lhotse.

+
+

Hint

+

The yesno recipe in lhotse is a very good example.

+

Please refer to https://github.com/lhotse-speech/lhotse/pull/380, +which shows how to add a new recipe to lhotse.

+
+

Suppose you would like to add a recipe for a dataset named foo. +You can do the following:

+
$ cd egs
+$ mkdir -p foo/ASR
+$ cd foo/ASR
+$ touch prepare.sh
+$ chmod +x prepare.sh
+
+
+

If your dataset is very simple, please follow +egs/yesno/ASR/prepare.sh +to write your own prepare.sh. +Otherwise, please refer to +egs/librispeech/ASR/prepare.sh +to prepare your data.

+
+
+

Training

+

Assume you have a fancy model, called bar for the foo recipe, you can +organize your files in the following way:

+
$ cd egs/foo/ASR
+$ mkdir bar
+$ cd bar
+$ touch README.md model.py train.py decode.py asr_datamodule.py pretrained.py
+
+
+

For instance , the yesno recipe has a tdnn model and its directory structure +looks like the following:

+
egs/yesno/ASR/tdnn/
+|-- README.md
+|-- asr_datamodule.py
+|-- decode.py
+|-- model.py
+|-- pretrained.py
+`-- train.py
+
+
+

File description:

+
+
    +
  • README.md

    +

    It contains information of this recipe, e.g., how to run it, what the WER is, etc.

    +
  • +
  • asr_datamodule.py

    +

    It provides code to create PyTorch dataloaders with train/test/validation dataset.

    +
  • +
  • decode.py

    +

    It takes as inputs the checkpoints saved during the training stage to decode the test +dataset(s).

    +
  • +
  • model.py

    +

    It contains the definition of your fancy neural network model.

    +
  • +
  • pretrained.py

    +

    We can use this script to do inference with a pre-trained model.

    +
  • +
  • train.py

    +

    It contains training code.

    +
  • +
+
+
+

Hint

+

Please take a look at

+
+
+

to get a feel what the resulting files look like.

+
+
+

Note

+

Every model in a recipe is kept to be as self-contained as possible. +We tolerate duplicate code among different recipes.

+
+

The training stage should be invocable by:

+
+
$ cd egs/foo/ASR
+$ ./bar/train.py
+$ ./bar/train.py --help
+
+
+
+
+
+

Decoding

+

Please refer to

+
+
+

The decoding stage should be invocable by:

+
+
$ cd egs/foo/ASR
+$ ./bar/decode.py
+$ ./bar/decode.py --help
+
+
+
+
+
+

Pre-trained model

+

Please demonstrate how to use your model for inference in egs/foo/ASR/bar/pretrained.py. +If possible, please consider creating a Colab notebook to show that.

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/contributing/index.html b/contributing/index.html new file mode 100644 index 000000000..3c9194e11 --- /dev/null +++ b/contributing/index.html @@ -0,0 +1,148 @@ + + + + + + + Contributing — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Contributing

+

Contributions to icefall are very welcomed. +There are many possible ways to make contributions and +two of them are:

+
+
    +
  • To write documentation

  • +
  • To write code

    +
      +
      1. +
      2. To follow the code style in the repository

      3. +
      +
    • +
      1. +
      2. To write a new recipe

      3. +
      +
    • +
    +
  • +
+
+

In this page, we describe how to contribute documentation +and code to icefall.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/genindex.html b/genindex.html new file mode 100644 index 000000000..39c6ca6a0 --- /dev/null +++ b/genindex.html @@ -0,0 +1,105 @@ + + + + + + Index — icefall 0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +

Index

+ +
+ +
+ + +
+
+
+ +
+ +
+

© Copyright 2021, icefall development team.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/huggingface/index.html b/huggingface/index.html new file mode 100644 index 000000000..6914e9f78 --- /dev/null +++ b/huggingface/index.html @@ -0,0 +1,125 @@ + + + + + + + Huggingface — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Huggingface

+

This section describes how to find pre-trained models. +It also demonstrates how to try them from within your browser +without installing anything by using +Huggingface spaces.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/huggingface/pretrained-models.html b/huggingface/pretrained-models.html new file mode 100644 index 000000000..114807a5f --- /dev/null +++ b/huggingface/pretrained-models.html @@ -0,0 +1,127 @@ + + + + + + + Pre-trained models — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Pre-trained models

+

We have uploaded pre-trained models for all recipes in icefall +to https://huggingface.co/.

+

You can find them by visiting the following link:

+

https://huggingface.co/models?search=icefall.

+

You can also find links of pre-trained models for a specific recipe +by looking at the corresponding RESULTS.md. For instance:

+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/huggingface/spaces.html b/huggingface/spaces.html new file mode 100644 index 000000000..3bf69d116 --- /dev/null +++ b/huggingface/spaces.html @@ -0,0 +1,162 @@ + + + + + + + Huggingface spaces — icefall 0.1 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Huggingface spaces

+

We have integrated the server framework +sherpa +with Huggingface spaces +so that you can try pre-trained models from within your browser +without the need to download or install anything.

+

All you need is a browser, which can be run on Windows, macOS, Linux, or even on your +iPad and your phone.

+

Start your browser and visit the following address:

+

https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition

+

and you will see a page like the following screenshot:

+screenshot of `<https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition>`_ +

You can:

+
+
    +
  1. Select a language for recognition. Currently, we provide pre-trained models +from icefall for the following languages: Chinese, English, and +Chinese+English.

  2. +
  3. After selecting the target language, you can select a pre-trained model +corresponding to the language.

  4. +
  5. Select the decoding method. Currently, it provides greedy search +and modified_beam_search.

  6. +
  7. If you selected modified_beam_search, you can choose the number of +active paths during the search.

  8. +
  9. Either upload a file or record your speech for recognition.

  10. +
  11. Click the button Submit for recognition.

  12. +
  13. Wait for a moment and you will get the recognition results.

  14. +
+
+

The following screenshot shows an example when selecting Chinese+English:

+screenshot of `<https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition>`_ +

In the bottom part of the page, you can find a table of examples. You can click +one of them and then click Submit for recognition.

+screenshot of `<https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition>`_ +
+

YouTube Video

+

We provide the following YouTube video demonstrating how to use +https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition.

+
+

Note

+

To get the latest news of next-gen Kaldi, please subscribe +the following YouTube channel by Nadira Povey:

+
+
+
+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 000000000..b085e30c9 --- /dev/null +++ b/index.html @@ -0,0 +1,142 @@ + + + + + + + Icefall — icefall 0.1 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + \ No newline at end of file diff --git a/installation/index.html b/installation/index.html new file mode 100644 index 000000000..069bdd6b4 --- /dev/null +++ b/installation/index.html @@ -0,0 +1,575 @@ + + + + + + + Installation — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Installation

+
    +
  • Supported operating systems

  • +
  • Supported devices

  • +
  • Supported python versions

  • +
  • Supported PyTorch versions

  • +
  • Supported k2 versions

  • +
+

icefall depends on k2 and +lhotse.

+

We recommend you to use the following steps to install the dependencies.

+
    +
    1. +
    2. Install PyTorch and torchaudio

    3. +
    +
  • +
    1. +
    2. Install k2

    3. +
    +
  • +
    1. +
    2. Install lhotse

    3. +
    +
  • +
+
+

Caution

+

Installation order matters.

+
+
+

(0) Install PyTorch and torchaudio

+

Please refer https://pytorch.org/ to install PyTorch +and torchaudio.

+
+
+

(1) Install k2

+

Please refer to https://k2-fsa.github.io/k2/installation/index.html +to install k2.

+
+

Caution

+

You need to install k2 with a version at least v1.9.

+
+
+

Hint

+

If you have already installed PyTorch and don’t want to replace it, +please install a version of k2 that is compiled against the version +of PyTorch you are using.

+
+
+
+

(2) Install lhotse

+

Please refer to https://lhotse.readthedocs.io/en/latest/getting-started.html#installation +to install lhotse.

+
+

Hint

+

We strongly recommend you to use:

+
pip install git+https://github.com/lhotse-speech/lhotse
+
+
+

to install the latest version of lhotse.

+
+
+
+

(3) Download icefall

+

icefall is a collection of Python scripts; what you need is to download it +and set the environment variable PYTHONPATH to point to it.

+

Assume you want to place icefall in the folder /tmp. The +following commands show you how to setup icefall:

+
cd /tmp
+git clone https://github.com/k2-fsa/icefall
+cd icefall
+pip install -r requirements.txt
+export PYTHONPATH=/tmp/icefall:$PYTHONPATH
+
+
+
+

Hint

+

You can put several versions of icefall in the same virtual environment. +To switch among different versions of icefall, just set PYTHONPATH +to point to the version you want.

+
+
+
+

Installation example

+

The following shows an example about setting up the environment.

+
+

(1) Create a virtual environment

+
$ virtualenv -p python3.8  test-icefall
+
+created virtual environment CPython3.8.6.final.0-64 in 1540ms
+  creator CPython3Posix(dest=/ceph-fj/fangjun/test-icefall, clear=False, no_vcs_ignore=False, global=False)
+  seeder FromAppData(download=False, pip=bundle, setuptools=bundle, wheel=bundle, via=copy, app_data_dir=/root/fangjun/.local/share/v
+irtualenv)
+    added seed packages: pip==21.1.3, setuptools==57.4.0, wheel==0.36.2
+  activators BashActivator,CShellActivator,FishActivator,PowerShellActivator,PythonActivator,XonshActivator
+
+
+
+
+

(2) Activate your virtual environment

+
$ source test-icefall/bin/activate
+
+
+
+
+

(3) Install k2

+
$ pip install k2==1.4.dev20210822+cpu.torch1.9.0 -f https://k2-fsa.org/nightly/index.html
+
+Looking in links: https://k2-fsa.org/nightly/index.html
+Collecting k2==1.4.dev20210822+cpu.torch1.9.0
+  Downloading https://k2-fsa.org/nightly/whl/k2-1.4.dev20210822%2Bcpu.torch1.9.0-cp38-cp38-linux_x86_64.whl (1.6 MB)
+     |________________________________| 1.6 MB 185 kB/s
+Collecting graphviz
+  Downloading graphviz-0.17-py3-none-any.whl (18 kB)
+Collecting torch==1.9.0
+  Using cached torch-1.9.0-cp38-cp38-manylinux1_x86_64.whl (831.4 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-3.10.0.0-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch, graphviz, k2
+Successfully installed graphviz-0.17 k2-1.4.dev20210822+cpu.torch1.9.0 torch-1.9.0 typing-extensions-3.10.0.0
+
+
+
+

Warning

+

We choose to install a CPU version of k2 for testing. You would probably want to install +a CUDA version of k2.

+
+
+
+

(4) Install lhotse

+
$ pip install git+https://github.com/lhotse-speech/lhotse
+
+Collecting git+https://github.com/lhotse-speech/lhotse
+  Cloning https://github.com/lhotse-speech/lhotse to /tmp/pip-req-build-7b1b76ge
+  Running command git clone -q https://github.com/lhotse-speech/lhotse /tmp/pip-req-build-7b1b76ge
+Collecting audioread>=2.1.9
+  Using cached audioread-2.1.9-py3-none-any.whl
+Collecting SoundFile>=0.10
+  Using cached SoundFile-0.10.3.post1-py2.py3-none-any.whl (21 kB)
+Collecting click>=7.1.1
+  Using cached click-8.0.1-py3-none-any.whl (97 kB)
+Collecting cytoolz>=0.10.1
+  Using cached cytoolz-0.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.9 MB)
+Collecting dataclasses
+  Using cached dataclasses-0.6-py3-none-any.whl (14 kB)
+Collecting h5py>=2.10.0
+  Downloading h5py-3.4.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (4.5 MB)
+     |________________________________| 4.5 MB 684 kB/s
+Collecting intervaltree>=3.1.0
+  Using cached intervaltree-3.1.0-py2.py3-none-any.whl
+Collecting lilcom>=1.1.0
+  Using cached lilcom-1.1.1-cp38-cp38-linux_x86_64.whl
+Collecting numpy>=1.18.1
+  Using cached numpy-1.21.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.8 MB)
+Collecting packaging
+  Using cached packaging-21.0-py3-none-any.whl (40 kB)
+Collecting pyyaml>=5.3.1
+  Using cached PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl (662 kB)
+Collecting tqdm
+  Downloading tqdm-4.62.1-py2.py3-none-any.whl (76 kB)
+     |________________________________| 76 kB 2.7 MB/s
+Collecting torchaudio==0.9.0
+  Downloading torchaudio-0.9.0-cp38-cp38-manylinux1_x86_64.whl (1.9 MB)
+     |________________________________| 1.9 MB 73.1 MB/s
+Requirement already satisfied: torch==1.9.0 in ./test-icefall/lib/python3.8/site-packages (from torchaudio==0.9.0->lhotse===0.8.0.dev
+-2a1410b-clean) (1.9.0)
+Requirement already satisfied: typing-extensions in ./test-icefall/lib/python3.8/site-packages (from torch==1.9.0->torchaudio==0.9.0-
+>lhotse===0.8.0.dev-2a1410b-clean) (3.10.0.0)
+Collecting toolz>=0.8.0
+  Using cached toolz-0.11.1-py3-none-any.whl (55 kB)
+Collecting sortedcontainers<3.0,>=2.0
+  Using cached sortedcontainers-2.4.0-py2.py3-none-any.whl (29 kB)
+Collecting cffi>=1.0
+  Using cached cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl (411 kB)
+Collecting pycparser
+  Using cached pycparser-2.20-py2.py3-none-any.whl (112 kB)
+Collecting pyparsing>=2.0.2
+  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
+Building wheels for collected packages: lhotse
+  Building wheel for lhotse (setup.py) ... done
+  Created wheel for lhotse: filename=lhotse-0.8.0.dev_2a1410b_clean-py3-none-any.whl size=342242 sha256=f683444afa4dc0881133206b4646a
+9d0f774224cc84000f55d0a67f6e4a37997
+  Stored in directory: /tmp/pip-ephem-wheel-cache-ftu0qysz/wheels/7f/7a/8e/a0bf241336e2e3cb573e1e21e5600952d49f5162454f2e612f
+  WARNING: Built wheel for lhotse is invalid: Metadata 1.2 mandates PEP 440 version, but '0.8.0.dev-2a1410b-clean' is not
+Failed to build lhotse
+Installing collected packages: pycparser, toolz, sortedcontainers, pyparsing, numpy, cffi, tqdm, torchaudio, SoundFile, pyyaml, packa
+ging, lilcom, intervaltree, h5py, dataclasses, cytoolz, click, audioread, lhotse
+    Running setup.py install for lhotse ... done
+  DEPRECATION: lhotse was installed using the legacy 'setup.py install' method, because a wheel could not be built for it. A possible
+ replacement is to fix the wheel build issue reported above. You can find discussion regarding this at https://github.com/pypa/pip/is
+sues/8368.
+Successfully installed SoundFile-0.10.3.post1 audioread-2.1.9 cffi-1.14.6 click-8.0.1 cytoolz-0.11.0 dataclasses-0.6 h5py-3.4.0 inter
+valtree-3.1.0 lhotse-0.8.0.dev-2a1410b-clean lilcom-1.1.1 numpy-1.21.2 packaging-21.0 pycparser-2.20 pyparsing-2.4.7 pyyaml-5.4.1 sor
+tedcontainers-2.4.0 toolz-0.11.1 torchaudio-0.9.0 tqdm-4.62.1
+
+
+
+
+

(5) Download icefall

+
$ cd /tmp
+$ git clone https://github.com/k2-fsa/icefall
+
+Cloning into 'icefall'...
+remote: Enumerating objects: 500, done.
+remote: Counting objects: 100% (500/500), done.
+remote: Compressing objects: 100% (308/308), done.
+remote: Total 500 (delta 263), reused 307 (delta 102), pack-reused 0
+Receiving objects: 100% (500/500), 172.49 KiB | 385.00 KiB/s, done.
+Resolving deltas: 100% (263/263), done.
+
+$ cd icefall
+$ pip install -r requirements.txt
+
+Collecting kaldilm
+  Downloading kaldilm-1.8.tar.gz (48 kB)
+     |________________________________| 48 kB 574 kB/s
+Collecting kaldialign
+  Using cached kaldialign-0.2-cp38-cp38-linux_x86_64.whl
+Collecting sentencepiece>=0.1.96
+  Using cached sentencepiece-0.1.96-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)
+Collecting tensorboard
+  Using cached tensorboard-2.6.0-py3-none-any.whl (5.6 MB)
+Requirement already satisfied: setuptools>=41.0.0 in /ceph-fj/fangjun/test-icefall/lib/python3.8/site-packages (from tensorboard->-r
+requirements.txt (line 4)) (57.4.0)
+Collecting absl-py>=0.4
+  Using cached absl_py-0.13.0-py3-none-any.whl (132 kB)
+Collecting google-auth-oauthlib<0.5,>=0.4.1
+  Using cached google_auth_oauthlib-0.4.5-py2.py3-none-any.whl (18 kB)
+Collecting grpcio>=1.24.3
+  Using cached grpcio-1.39.0-cp38-cp38-manylinux2014_x86_64.whl (4.3 MB)
+Requirement already satisfied: wheel>=0.26 in /ceph-fj/fangjun/test-icefall/lib/python3.8/site-packages (from tensorboard->-r require
+ments.txt (line 4)) (0.36.2)
+Requirement already satisfied: numpy>=1.12.0 in /ceph-fj/fangjun/test-icefall/lib/python3.8/site-packages (from tensorboard->-r requi
+rements.txt (line 4)) (1.21.2)
+Collecting protobuf>=3.6.0
+  Using cached protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0 MB)
+Collecting werkzeug>=0.11.15
+  Using cached Werkzeug-2.0.1-py3-none-any.whl (288 kB)
+Collecting tensorboard-data-server<0.7.0,>=0.6.0
+  Using cached tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl (4.9 MB)
+Collecting google-auth<2,>=1.6.3
+  Downloading google_auth-1.35.0-py2.py3-none-any.whl (152 kB)
+     |________________________________| 152 kB 1.4 MB/s
+Collecting requests<3,>=2.21.0
+  Using cached requests-2.26.0-py2.py3-none-any.whl (62 kB)
+Collecting tensorboard-plugin-wit>=1.6.0
+  Using cached tensorboard_plugin_wit-1.8.0-py3-none-any.whl (781 kB)
+Collecting markdown>=2.6.8
+  Using cached Markdown-3.3.4-py3-none-any.whl (97 kB)
+Collecting six
+  Using cached six-1.16.0-py2.py3-none-any.whl (11 kB)
+Collecting cachetools<5.0,>=2.0.0
+  Using cached cachetools-4.2.2-py3-none-any.whl (11 kB)
+Collecting rsa<5,>=3.1.4
+  Using cached rsa-4.7.2-py3-none-any.whl (34 kB)
+Collecting pyasn1-modules>=0.2.1
+  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
+Collecting requests-oauthlib>=0.7.0
+  Using cached requests_oauthlib-1.3.0-py2.py3-none-any.whl (23 kB)
+Collecting pyasn1<0.5.0,>=0.4.6
+  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
+Collecting urllib3<1.27,>=1.21.1
+  Using cached urllib3-1.26.6-py2.py3-none-any.whl (138 kB)
+Collecting certifi>=2017.4.17
+  Using cached certifi-2021.5.30-py2.py3-none-any.whl (145 kB)
+Collecting charset-normalizer~=2.0.0
+  Using cached charset_normalizer-2.0.4-py3-none-any.whl (36 kB)
+Collecting idna<4,>=2.5
+  Using cached idna-3.2-py3-none-any.whl (59 kB)
+Collecting oauthlib>=3.0.0
+  Using cached oauthlib-3.1.1-py2.py3-none-any.whl (146 kB)
+Building wheels for collected packages: kaldilm
+  Building wheel for kaldilm (setup.py) ... done
+  Created wheel for kaldilm: filename=kaldilm-1.8-cp38-cp38-linux_x86_64.whl size=897233 sha256=eccb906cafcd45bf9a7e1a1718e4534254bfb
+f4c0d0cbc66eee6c88d68a63862
+  Stored in directory: /root/fangjun/.cache/pip/wheels/85/7d/63/f2dd586369b8797cb36d213bf3a84a789eeb92db93d2e723c9
+Successfully built kaldilm
+Installing collected packages: urllib3, pyasn1, idna, charset-normalizer, certifi, six, rsa, requests, pyasn1-modules, oauthlib, cach
+etools, requests-oauthlib, google-auth, werkzeug, tensorboard-plugin-wit, tensorboard-data-server, protobuf, markdown, grpcio, google
+-auth-oauthlib, absl-py, tensorboard, sentencepiece, kaldilm, kaldialign
+Successfully installed absl-py-0.13.0 cachetools-4.2.2 certifi-2021.5.30 charset-normalizer-2.0.4 google-auth-1.35.0 google-auth-oaut
+hlib-0.4.5 grpcio-1.39.0 idna-3.2 kaldialign-0.2 kaldilm-1.8 markdown-3.3.4 oauthlib-3.1.1 protobuf-3.17.3 pyasn1-0.4.8 pyasn1-module
+s-0.2.8 requests-2.26.0 requests-oauthlib-1.3.0 rsa-4.7.2 sentencepiece-0.1.96 six-1.16.0 tensorboard-2.6.0 tensorboard-data-server-0
+.6.1 tensorboard-plugin-wit-1.8.0 urllib3-1.26.6 werkzeug-2.0.1
+
+
+
+
+
+

Test Your Installation

+

To test that your installation is successful, let us run +the yesno recipe +on CPU.

+
+

Data preparation

+
$ export PYTHONPATH=/tmp/icefall:$PYTHONPATH
+$ cd /tmp/icefall
+$ cd egs/yesno/ASR
+$ ./prepare.sh
+
+
+

The log of running ./prepare.sh is:

+
2021-08-23 19:27:26 (prepare.sh:24:main) dl_dir: /tmp/icefall/egs/yesno/ASR/download
+2021-08-23 19:27:26 (prepare.sh:27:main) stage 0: Download data
+Downloading waves_yesno.tar.gz: 4.49MB [00:03, 1.39MB/s]
+2021-08-23 19:27:30 (prepare.sh:36:main) Stage 1: Prepare yesno manifest
+2021-08-23 19:27:31 (prepare.sh:42:main) Stage 2: Compute fbank for yesno
+2021-08-23 19:27:32,803 INFO [compute_fbank_yesno.py:52] Processing train
+Extracting and storing features: 100%|_______________________________________________________________| 90/90 [00:01<00:00, 80.57it/s]
+2021-08-23 19:27:34,085 INFO [compute_fbank_yesno.py:52] Processing test
+Extracting and storing features: 100%|______________________________________________________________| 30/30 [00:00<00:00, 248.21it/s]
+2021-08-23 19:27:34 (prepare.sh:48:main) Stage 3: Prepare lang
+2021-08-23 19:27:35 (prepare.sh:63:main) Stage 4: Prepare G
+/tmp/pip-install-fcordre9/kaldilm_6899d26f2d684ad48f21025950cd2866/kaldilm/csrc/arpa_file_parser.cc:void kaldilm::ArpaFileParser::Rea
+d(std::istream&):79
+[I] Reading \data\ section.
+/tmp/pip-install-fcordre9/kaldilm_6899d26f2d684ad48f21025950cd2866/kaldilm/csrc/arpa_file_parser.cc:void kaldilm::ArpaFileParser::Rea
+d(std::istream&):140
+[I] Reading \1-grams: section.
+2021-08-23 19:27:35 (prepare.sh:89:main) Stage 5: Compile HLG
+2021-08-23 19:27:35,928 INFO [compile_hlg.py:120] Processing data/lang_phone
+2021-08-23 19:27:35,929 INFO [lexicon.py:116] Converting L.pt to Linv.pt
+2021-08-23 19:27:35,931 INFO [compile_hlg.py:48] Building ctc_topo. max_token_id: 3
+2021-08-23 19:27:35,932 INFO [compile_hlg.py:52] Loading G.fst.txt
+2021-08-23 19:27:35,932 INFO [compile_hlg.py:62] Intersecting L and G
+2021-08-23 19:27:35,933 INFO [compile_hlg.py:64] LG shape: (4, None)
+2021-08-23 19:27:35,933 INFO [compile_hlg.py:66] Connecting LG
+2021-08-23 19:27:35,933 INFO [compile_hlg.py:68] LG shape after k2.connect: (4, None)
+2021-08-23 19:27:35,933 INFO [compile_hlg.py:70] <class 'torch.Tensor'>
+2021-08-23 19:27:35,933 INFO [compile_hlg.py:71] Determinizing LG
+2021-08-23 19:27:35,934 INFO [compile_hlg.py:74] <class '_k2.RaggedInt'>
+2021-08-23 19:27:35,934 INFO [compile_hlg.py:76] Connecting LG after k2.determinize
+2021-08-23 19:27:35,934 INFO [compile_hlg.py:79] Removing disambiguation symbols on LG
+2021-08-23 19:27:35,934 INFO [compile_hlg.py:87] LG shape after k2.remove_epsilon: (6, None)
+2021-08-23 19:27:35,935 INFO [compile_hlg.py:92] Arc sorting LG
+2021-08-23 19:27:35,935 INFO [compile_hlg.py:95] Composing H and LG
+2021-08-23 19:27:35,935 INFO [compile_hlg.py:102] Connecting LG
+2021-08-23 19:27:35,935 INFO [compile_hlg.py:105] Arc sorting LG
+2021-08-23 19:27:35,936 INFO [compile_hlg.py:107] HLG.shape: (8, None)
+2021-08-23 19:27:35,936 INFO [compile_hlg.py:123] Saving HLG.pt to data/lang_phone
+
+
+
+
+

Training

+

Now let us run the training part:

+
$ export CUDA_VISIBLE_DEVICES=""
+$ ./tdnn/train.py
+
+
+
+

Caution

+

We use export CUDA_VISIBLE_DEVICES="" so that icefall uses CPU +even if there are GPUs available.

+
+

The training log is given below:

+
2021-08-23 19:30:31,072 INFO [train.py:465] Training started
+2021-08-23 19:30:31,072 INFO [train.py:466] {'exp_dir': PosixPath('tdnn/exp'), 'lang_dir': PosixPath('data/lang_phone'), 'lr': 0.01,
+'feature_dim': 23, 'weight_decay': 1e-06, 'start_epoch': 0, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, '
+best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 10, 'valid_interval': 10, 'beam_size': 10, 'reduction': 'sum', 'use_doub
+le_scores': True, 'world_size': 1, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 15, 'feature_dir': PosixPath('data/fbank'
+), 'max_duration': 30.0, 'bucketing_sampler': False, 'num_buckets': 10, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0
+, 'on_the_fly_feats': False, 'shuffle': True, 'return_cuts': True, 'num_workers': 2}
+2021-08-23 19:30:31,074 INFO [lexicon.py:113] Loading pre-compiled data/lang_phone/Linv.pt
+2021-08-23 19:30:31,098 INFO [asr_datamodule.py:146] About to get train cuts
+2021-08-23 19:30:31,098 INFO [asr_datamodule.py:240] About to get train cuts
+2021-08-23 19:30:31,102 INFO [asr_datamodule.py:149] About to create train dataset
+2021-08-23 19:30:31,102 INFO [asr_datamodule.py:200] Using SingleCutSampler.
+2021-08-23 19:30:31,102 INFO [asr_datamodule.py:206] About to create train dataloader
+2021-08-23 19:30:31,102 INFO [asr_datamodule.py:219] About to get test cuts
+2021-08-23 19:30:31,102 INFO [asr_datamodule.py:246] About to get test cuts
+2021-08-23 19:30:31,357 INFO [train.py:416] Epoch 0, batch 0, batch avg loss 1.0789, total avg loss: 1.0789, batch size: 4
+2021-08-23 19:30:31,848 INFO [train.py:416] Epoch 0, batch 10, batch avg loss 0.5356, total avg loss: 0.7556, batch size: 4
+2021-08-23 19:30:32,301 INFO [train.py:432] Epoch 0, valid loss 0.9972, best valid loss: 0.9972 best valid epoch: 0
+2021-08-23 19:30:32,805 INFO [train.py:416] Epoch 0, batch 20, batch avg loss 0.2436, total avg loss: 0.5717, batch size: 3
+2021-08-23 19:30:33,109 INFO [train.py:432] Epoch 0, valid loss 0.4167, best valid loss: 0.4167 best valid epoch: 0
+2021-08-23 19:30:33,121 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-0.pt
+2021-08-23 19:30:33,325 INFO [train.py:416] Epoch 1, batch 0, batch avg loss 0.2214, total avg loss: 0.2214, batch size: 5
+2021-08-23 19:30:33,798 INFO [train.py:416] Epoch 1, batch 10, batch avg loss 0.0781, total avg loss: 0.1343, batch size: 5
+2021-08-23 19:30:34,065 INFO [train.py:432] Epoch 1, valid loss 0.0859, best valid loss: 0.0859 best valid epoch: 1
+2021-08-23 19:30:34,556 INFO [train.py:416] Epoch 1, batch 20, batch avg loss 0.0421, total avg loss: 0.0975, batch size: 3
+2021-08-23 19:30:34,810 INFO [train.py:432] Epoch 1, valid loss 0.0431, best valid loss: 0.0431 best valid epoch: 1
+2021-08-23 19:30:34,824 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-1.pt
+
+... ...
+
+2021-08-23 19:30:49,657 INFO [train.py:416] Epoch 13, batch 0, batch avg loss 0.0109, total avg loss: 0.0109, batch size: 5
+2021-08-23 19:30:49,984 INFO [train.py:416] Epoch 13, batch 10, batch avg loss 0.0093, total avg loss: 0.0096, batch size: 4
+2021-08-23 19:30:50,239 INFO [train.py:432] Epoch 13, valid loss 0.0104, best valid loss: 0.0101 best valid epoch: 12
+2021-08-23 19:30:50,569 INFO [train.py:416] Epoch 13, batch 20, batch avg loss 0.0092, total avg loss: 0.0096, batch size: 2
+2021-08-23 19:30:50,819 INFO [train.py:432] Epoch 13, valid loss 0.0101, best valid loss: 0.0101 best valid epoch: 13
+2021-08-23 19:30:50,835 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-13.pt
+2021-08-23 19:30:51,024 INFO [train.py:416] Epoch 14, batch 0, batch avg loss 0.0105, total avg loss: 0.0105, batch size: 5
+2021-08-23 19:30:51,317 INFO [train.py:416] Epoch 14, batch 10, batch avg loss 0.0099, total avg loss: 0.0097, batch size: 4
+2021-08-23 19:30:51,552 INFO [train.py:432] Epoch 14, valid loss 0.0108, best valid loss: 0.0101 best valid epoch: 13
+2021-08-23 19:30:51,869 INFO [train.py:416] Epoch 14, batch 20, batch avg loss 0.0096, total avg loss: 0.0097, batch size: 5
+2021-08-23 19:30:52,107 INFO [train.py:432] Epoch 14, valid loss 0.0102, best valid loss: 0.0101 best valid epoch: 13
+2021-08-23 19:30:52,126 INFO [checkpoint.py:62] Saving checkpoint to tdnn/exp/epoch-14.pt
+2021-08-23 19:30:52,128 INFO [train.py:537] Done!
+
+
+
+
+

Decoding

+

Let us use the trained model to decode the test set:

+
$ ./tdnn/decode.py
+
+
+

The decoding log is:

+
2021-08-23 19:35:30,192 INFO [decode.py:249] Decoding started
+2021-08-23 19:35:30,192 INFO [decode.py:250] {'exp_dir': PosixPath('tdnn/exp'), 'lang_dir': PosixPath('data/lang_phone'), 'lm_dir': PosixPath('data/lm'), 'feature_dim': 23, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'epoch': 14, 'avg': 2, 'feature_dir': PosixPath('data/fbank'), 'max_duration': 30.0, 'bucketing_sampler': False, 'num_buckets': 10, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'return_cuts': True, 'num_workers': 2}
+2021-08-23 19:35:30,193 INFO [lexicon.py:113] Loading pre-compiled data/lang_phone/Linv.pt
+2021-08-23 19:35:30,213 INFO [decode.py:259] device: cpu
+2021-08-23 19:35:30,217 INFO [decode.py:279] averaging ['tdnn/exp/epoch-13.pt', 'tdnn/exp/epoch-14.pt']
+/tmp/icefall/icefall/checkpoint.py:146: UserWarning: floor_divide is deprecated, and will be removed in a future version of pytorch.
+It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values.
+To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). (Triggered internally at  /pytorch/aten/src/ATen/native/BinaryOps.cpp:450.)
+  avg[k] //= n
+2021-08-23 19:35:30,220 INFO [asr_datamodule.py:219] About to get test cuts
+2021-08-23 19:35:30,220 INFO [asr_datamodule.py:246] About to get test cuts
+2021-08-23 19:35:30,409 INFO [decode.py:190] batch 0/8, cuts processed until now is 4
+2021-08-23 19:35:30,571 INFO [decode.py:228] The transcripts are stored in tdnn/exp/recogs-test_set.txt
+2021-08-23 19:35:30,572 INFO [utils.py:317] [test_set] %WER 0.42% [1 / 240, 0 ins, 1 del, 0 sub ]
+2021-08-23 19:35:30,573 INFO [decode.py:236] Wrote detailed error stats to tdnn/exp/errs-test_set.txt
+2021-08-23 19:35:30,573 INFO [decode.py:299] Done!
+
+
+

Congratulations! You have successfully setup the environment and have run the first recipe in icefall.

+

Have fun with icefall!

+
+
+
+

YouTube Video

+

We provide the following YouTube video showing how to install icefall. +It also shows how to debug various problems that you may encounter while +using icefall.

+
+

Note

+

To get the latest news of next-gen Kaldi, please subscribe +the following YouTube channel by Nadira Povey:

+
+
+
+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/objects.inv b/objects.inv new file mode 100644 index 000000000..4dc5512ac Binary files /dev/null and b/objects.inv differ diff --git a/recipes/aishell/conformer_ctc.html b/recipes/aishell/conformer_ctc.html new file mode 100644 index 000000000..1ae1f0eea --- /dev/null +++ b/recipes/aishell/conformer_ctc.html @@ -0,0 +1,816 @@ + + + + + + + Conformer CTC — icefall 0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Conformer CTC

+

This tutorial shows you how to run a conformer ctc model +with the Aishell dataset.

+
+

Hint

+

We assume you have read the page Installation and have setup +the environment for icefall.

+
+
+

Hint

+

We recommend you to use a GPU or several GPUs to run this recipe.

+
+

In this tutorial, you will learn:

+
+
    +
    1. +
    2. How to prepare data for training and decoding

    3. +
    +
  • +
    1. +
    2. How to start the training, either with a single GPU or multiple GPUs

    3. +
    +
  • +
    1. +
    2. How to do decoding after training, with ctc-decoding, 1best and attention decoder rescoring

    3. +
    +
  • +
    1. +
    2. How to use a pre-trained model, provided by us

    3. +
    +
  • +
+
+
+

Data preparation

+
$ cd egs/aishell/ASR
+$ ./prepare.sh
+
+
+

The script ./prepare.sh handles the data preparation for you, automagically. +All you need to do is to run it.

+

The data preparation contains several stages, you can use the following two +options:

+
+
    +
  • --stage

  • +
  • --stop-stage

  • +
+
+

to control which stage(s) should be run. By default, all stages are executed.

+

For example,

+
$ cd egs/aishell/ASR
+$ ./prepare.sh --stage 0 --stop-stage 0
+
+
+

means to run only stage 0.

+

To run stage 2 to stage 5, use:

+
$ ./prepare.sh --stage 2 --stop-stage 5
+
+
+
+

Hint

+

If you have pre-downloaded the Aishell +dataset and the musan dataset, say, +they are saved in /tmp/aishell and /tmp/musan, you can modify +the dl_dir variable in ./prepare.sh to point to /tmp so that +./prepare.sh won’t re-download them.

+
+
+

Hint

+

A 3-gram language model will be downloaded from huggingface, we assume you have +intalled and initialized git-lfs. If not, you could install git-lfs by

+
$ sudo apt-get install git-lfs
+$ git-lfs install
+
+
+

If you don’t have the sudo permission, you could download the +git-lfs binary here, then add it to you PATH.

+
+
+

Note

+

All generated files by ./prepare.sh, e.g., features, lexicon, etc, +are saved in ./data directory.

+
+
+
+

Training

+
+

Configurable options

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/train.py --help
+
+
+

shows you the training options that can be passed from the commandline. +The following options are used quite often:

+
+
    +
  • --exp-dir

    +

    The experiment folder to save logs and model checkpoints, +default ./conformer_ctc/exp.

    +
  • +
  • --num-epochs

    +

    It is the number of epochs to train. For instance, +./conformer_ctc/train.py --num-epochs 30 trains for 30 epochs +and generates epoch-0.pt, epoch-1.pt, …, epoch-29.pt +in the folder set by --exp-dir.

    +
  • +
  • --start-epoch

    +

    It’s used to resume training. +./conformer_ctc/train.py --start-epoch 10 loads the +checkpoint ./conformer_ctc/exp/epoch-9.pt and starts +training from epoch 10, based on the state from epoch 9.

    +
  • +
  • --world-size

    +

    It is used for multi-GPU single-machine DDP training.

    +
    +
      +
      1. +
      2. If it is 1, then no DDP training is used.

      3. +
      +
    • +
      1. +
      2. If it is 2, then GPU 0 and GPU 1 are used for DDP training.

      3. +
      +
    • +
    +
    +

    The following shows some use cases with it.

    +
    +

    Use case 1: You have 4 GPUs, but you only want to use GPU 0 and +GPU 2 for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ export CUDA_VISIBLE_DEVICES="0,2"
    +$ ./conformer_ctc/train.py --world-size 2
    +
    +
    +
    +

    Use case 2: You have 4 GPUs and you want to use all of them +for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ ./conformer_ctc/train.py --world-size 4
    +
    +
    +
    +

    Use case 3: You have 4 GPUs but you only want to use GPU 3 +for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ export CUDA_VISIBLE_DEVICES="3"
    +$ ./conformer_ctc/train.py --world-size 1
    +
    +
    +
    +
    +
    +

    Caution

    +

    Only multi-GPU single-machine DDP training is implemented at present. +Multi-GPU multi-machine DDP training will be added later.

    +
    +
  • +
  • --max-duration

    +

    It specifies the number of seconds over all utterances in a +batch, before padding. +If you encounter CUDA OOM, please reduce it. For instance, if +your are using V100 NVIDIA GPU, we recommend you to set it to 200.

    +
    +

    Hint

    +

    Due to padding, the number of seconds of all utterances in a +batch will usually be larger than --max-duration.

    +

    A larger value for --max-duration may cause OOM during training, +while a smaller value may increase the training time. You have to +tune it.

    +
    +
  • +
+
+
+
+

Pre-configured options

+

There are some training options, e.g., weight decay, +number of warmup steps, etc, +that are not passed from the commandline. +They are pre-configured by the function get_params() in +conformer_ctc/train.py

+

You don’t need to change these pre-configured parameters. If you really need to change +them, please modify ./conformer_ctc/train.py directly.

+
+

Caution

+

The training set is perturbed by speed with two factors: 0.9 and 1.1. +Each epoch actually processes 3x150 == 450 hours of data.

+
+
+
+

Training logs

+

Training logs and checkpoints are saved in the folder set by --exp-dir +(default conformer_ctc/exp). You will find the following files in that directory:

+
+
    +
  • epoch-0.pt, epoch-1.pt, …

    +

    These are checkpoint files, containing model state_dict and optimizer state_dict. +To resume training from some checkpoint, say epoch-10.pt, you can use:

    +
    +
    $ ./conformer_ctc/train.py --start-epoch 11
    +
    +
    +
    +
  • +
  • tensorboard/

    +

    This folder contains TensorBoard logs. Training loss, validation loss, learning +rate, etc, are recorded in these logs. You can visualize them by:

    +
    +
    $ cd conformer_ctc/exp/tensorboard
    +$ tensorboard dev upload --logdir . --name "Aishell conformer ctc training with icefall" --description "Training with new LabelSmoothing loss, see https://github.com/k2-fsa/icefall/pull/109"
    +
    +
    +
    +

    It will print something like below:

    +
    +
    TensorFlow installation not found - running with reduced feature set.
    +Upload started and will continue reading any new data as it's added to the logdir.
    +
    +To stop uploading, press Ctrl-C.
    +
    +New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/engw8KSkTZqS24zBV5dgCg/
    +
    +[2021-11-22T11:09:27] Started scanning logdir.
    +[2021-11-22T11:10:14] Total uploaded: 116068 scalars, 0 tensors, 0 binary objects
    +Listening for new data in logdir...
    +
    +
    +
    +

    Note there is a URL in the above output, click it and you will see +the following screenshot:

    +
    +
    +TensorBoard screenshot +
    +

    Fig. 2 TensorBoard screenshot.

    +
    +
    +
    +
  • +
  • log/log-train-xxxx

    +

    It is the detailed training log in text format, same as the one +you saw printed to the console during training.

    +
  • +
+
+
+
+

Usage examples

+

The following shows typical use cases:

+
+

Case 1

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/train.py --max-duration 200
+
+
+

It uses --max-duration of 200 to avoid OOM.

+
+
+

Case 2

+
$ cd egs/aishell/ASR
+$ export CUDA_VISIBLE_DEVICES="0,3"
+$ ./conformer_ctc/train.py --world-size 2
+
+
+

It uses GPU 0 and GPU 3 for DDP training.

+
+
+

Case 3

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/train.py --num-epochs 10 --start-epoch 3
+
+
+

It loads checkpoint ./conformer_ctc/exp/epoch-2.pt and starts +training from epoch 3. Also, it trains for 10 epochs.

+
+
+
+
+

Decoding

+

The decoding part uses checkpoints saved by the training part, so you have +to run the training part first.

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/decode.py --help
+
+
+

shows the options for decoding.

+

The commonly used options are:

+
+
    +
  • --method

    +

    This specifies the decoding method.

    +

    The following command uses attention decoder for rescoring:

    +
    $ cd egs/aishell/ASR
    +$ ./conformer_ctc/decode.py --method attention-decoder --max-duration 30 --nbest-scale 0.5
    +
    +
    +
  • +
  • --nbest-scale

    +

    It is used to scale down lattice scores so that there are more unique +paths for rescoring.

    +
  • +
  • --max-duration

    +

    It has the same meaning as the one during training. A larger +value may cause OOM.

    +
  • +
+
+
+
+

Pre-trained Model

+

We have uploaded a pre-trained model to +https://huggingface.co/pkufool/icefall_asr_aishell_conformer_ctc.

+

We describe how to use the pre-trained model to transcribe a sound file or +multiple sound files in the following.

+
+

Install kaldifeat

+

kaldifeat is used to +extract features for a single sound file or multiple sound files +at the same time.

+

Please refer to https://github.com/csukuangfj/kaldifeat for installation.

+
+
+

Download the pre-trained model

+

The following commands describe how to download the pre-trained model:

+
$ cd egs/aishell/ASR
+$ mkdir tmp
+$ cd tmp
+$ git lfs install
+$ git clone https://huggingface.co/pkufool/icefall_asr_aishell_conformer_ctc
+
+
+
+

Caution

+

You have to use git lfs to download the pre-trained model.

+
+
+

Caution

+

In order to use this pre-trained model, your k2 version has to be v1.7 or later.

+
+

After downloading, you will have the following files:

+
$ cd egs/aishell/ASR
+$ tree tmp
+
+
+
tmp/
+`-- icefall_asr_aishell_conformer_ctc
+    |-- README.md
+    |-- data
+    |   `-- lang_char
+    |       |-- HLG.pt
+    |       |-- tokens.txt
+    |       `-- words.txt
+    |-- exp
+    |   `-- pretrained.pt
+    `-- test_waves
+        |-- BAC009S0764W0121.wav
+        |-- BAC009S0764W0122.wav
+        |-- BAC009S0764W0123.wav
+        `-- trans.txt
+
+5 directories, 9 files
+
+
+

File descriptions:

+
+
    +
  • data/lang_char/HLG.pt

    +
    +

    It is the decoding graph.

    +
    +
  • +
  • data/lang_char/tokens.txt

    +
    +

    It contains tokens and their IDs. +Provided only for convenience so that you can look up the SOS/EOS ID easily.

    +
    +
  • +
  • data/lang_char/words.txt

    +
    +

    It contains words and their IDs.

    +
    +
  • +
  • exp/pretrained.pt

    +
    +

    It contains pre-trained model parameters, obtained by averaging +checkpoints from epoch-25.pt to epoch-84.pt. +Note: We have removed optimizer state_dict to reduce file size.

    +
    +
  • +
  • test_waves/*.wav

    +
    +

    It contains some test sound files from Aishell test dataset.

    +
    +
  • +
  • test_waves/trans.txt

    +
    +

    It contains the reference transcripts for the sound files in test_waves/.

    +
    +
  • +
+
+

The information of the test sound files is listed below:

+
$ soxi tmp/icefall_asr_aishell_conformer_ctc/test_wavs/*.wav
+
+Input File     : 'tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.20 = 67263 samples ~ 315.295 CDDA sectors
+File Size      : 135k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.12 = 65840 samples ~ 308.625 CDDA sectors
+File Size      : 132k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.00 = 64000 samples ~ 300 CDDA sectors
+File Size      : 128k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+Total Duration of 3 files: 00:00:12.32
+
+
+
+
+

Usage

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/pretrained.py --help
+
+
+

displays the help information.

+

It supports three decoding methods:

+
+
    +
  • CTC decoding

  • +
  • HLG decoding

  • +
  • HLG + attention decoder rescoring

  • +
+
+
+

CTC decoding

+

CTC decoding only uses the ctc topology for decoding without a lexicon and language model

+

The command to run CTC decoding is:

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/pretrained.py \
+  --checkpoint ./tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt \
+  --tokens-file ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/tokens.txt \
+  --method ctc-decoding \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0121.wav \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0122.wav \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0123.wav
+
+
+

The output is given below:

+
2021-11-18 07:53:41,707 INFO [pretrained.py:229] {'sample_rate': 16000, 'subsampling_factor': 4, 'feature_dim': 80, 'nhead': 4, 'attention_dim': 512, 'num_decoder_layers': 6, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f2fd997f752ed11bbef4c306652c433e83f9cf12', 'k2-git-date': 'Sun Sep 19 09:41:46 2021', 'lhotse-version': '0.11.0.dev+git.33cfe45.clean', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'aishell', 'icefall-git-sha1': 'd57a873-dirty', 'icefall-git-date': 'Wed Nov 17 19:53:25 2021', 'icefall-path': '/ceph-hw/kangwei/code/icefall_aishell3', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/code/lhotse/lhotse/__init__.py'}, 'checkpoint': './tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt', 'tokens_file': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/tokens.txt', 'words_file': None, 'HLG': None, 'method': 'ctc-decoding', 'num_paths': 100, 'ngram_lm_scale': 0.3, 'attention_decoder_scale': 0.9, 'nbest_scale': 0.5, 'sos_id': 1, 'eos_id': 1, 'num_classes': 4336, 'sound_files': ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']}
+2021-11-18 07:53:41,708 INFO [pretrained.py:240] device: cuda:0
+2021-11-18 07:53:41,708 INFO [pretrained.py:242] Creating model
+2021-11-18 07:53:51,131 INFO [pretrained.py:259] Constructing Fbank computer
+2021-11-18 07:53:51,134 INFO [pretrained.py:269] Reading sound files: ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']
+2021-11-18 07:53:51,138 INFO [pretrained.py:275] Decoding started
+2021-11-18 07:53:51,241 INFO [pretrained.py:293] Use CTC decoding
+2021-11-18 07:53:51,704 INFO [pretrained.py:369]
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav:
+            
+
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav:
+  线          
+
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav:
+           
+
+
+2021-11-18 07:53:51,704 INFO [pretrained.py:371] Decoding Done
+
+
+
+
+

HLG decoding

+

HLG decoding uses the best path of the decoding lattice as the decoding result.

+

The command to run HLG decoding is:

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/pretrained.py \
+  --checkpoint ./tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt \
+  --words-file ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt \
+  --HLG ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt \
+  --method 1best \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0121.wav \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0122.wav \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0123.wav
+
+
+

The output is given below:

+
2021-11-18 07:37:38,683 INFO [pretrained.py:229] {'sample_rate': 16000, 'subsampling_factor': 4, 'feature_dim': 80, 'nhead': 4, 'attention_dim': 512, 'num_decoder_layers': 6, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f2fd997f752ed11bbef4c306652c433e83f9cf12', 'k2-git-date': 'Sun Sep 19 09:41:46 2021', 'lhotse-version': '0.11.0.dev+git.33cfe45.clean', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'aishell', 'icefall-git-sha1': 'd57a873-clean', 'icefall-git-date': 'Wed Nov 17 19:53:25 2021', 'icefall-path': '/ceph-hw/kangwei/code/icefall_aishell3', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/code/lhotse/lhotse/__init__.py'}, 'checkpoint': './tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt', 'tokens_file': None, 'words_file': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt', 'HLG': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt', 'method': '1best', 'num_paths': 100, 'ngram_lm_scale': 0.3, 'attention_decoder_scale': 0.9, 'nbest_scale': 0.5, 'sos_id': 1, 'eos_id': 1, 'num_classes': 4336, 'sound_files': ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']}
+2021-11-18 07:37:38,684 INFO [pretrained.py:240] device: cuda:0
+2021-11-18 07:37:38,684 INFO [pretrained.py:242] Creating model
+2021-11-18 07:37:47,651 INFO [pretrained.py:259] Constructing Fbank computer
+2021-11-18 07:37:47,654 INFO [pretrained.py:269] Reading sound files: ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']
+2021-11-18 07:37:47,659 INFO [pretrained.py:275] Decoding started
+2021-11-18 07:37:47,752 INFO [pretrained.py:321] Loading HLG from ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt
+2021-11-18 07:37:51,887 INFO [pretrained.py:340] Use HLG decoding
+2021-11-18 07:37:52,102 INFO [pretrained.py:370]
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav:
+甚至 出现 交易 几乎 停止  情况
+
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav:
+一二 线 城市 虽然  处于 调整 
+
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav:
+ 因为 聚集  过多 公共 资源
+
+
+2021-11-18 07:37:52,102 INFO [pretrained.py:372] Decoding Done
+
+
+
+
+

HLG decoding + attention decoder rescoring

+

It extracts n paths from the lattice, recores the extracted paths with +an attention decoder. The path with the highest score is the decoding result.

+

The command to run HLG decoding + attention decoder rescoring is:

+
$ cd egs/aishell/ASR
+$ ./conformer_ctc/pretrained.py \
+  --checkpoint ./tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt \
+  --words-file ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt \
+  --HLG ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt \
+  --method attention-decoder \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0121.wav \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0122.wav \
+  ./tmp/icefall_asr_aishell_conformer_ctc/test_wavs/BAC009S0764W0123.wav
+
+
+

The output is below:

+
2021-11-18 07:42:05,965 INFO [pretrained.py:229] {'sample_rate': 16000, 'subsampling_factor': 4, 'feature_dim': 80, 'nhead': 4, 'attention_dim': 512, 'num_decoder_layers': 6, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f2fd997f752ed11bbef4c306652c433e83f9cf12', 'k2-git-date': 'Sun Sep 19 09:41:46 2021', 'lhotse-version': '0.11.0.dev+git.33cfe45.clean', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'aishell', 'icefall-git-sha1': 'd57a873-dirty', 'icefall-git-date': 'Wed Nov 17 19:53:25 2021', 'icefall-path': '/ceph-hw/kangwei/code/icefall_aishell3', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/code/lhotse/lhotse/__init__.py'}, 'checkpoint': './tmp/icefall_asr_aishell_conformer_ctc/exp/pretrained.pt', 'tokens_file': None, 'words_file': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt', 'HLG': './tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt', 'method': 'attention-decoder', 'num_paths': 100, 'ngram_lm_scale': 0.3, 'attention_decoder_scale': 0.9, 'nbest_scale': 0.5, 'sos_id': 1, 'eos_id': 1, 'num_classes': 4336, 'sound_files': ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']}
+2021-11-18 07:42:05,966 INFO [pretrained.py:240] device: cuda:0
+2021-11-18 07:42:05,966 INFO [pretrained.py:242] Creating model
+2021-11-18 07:42:16,821 INFO [pretrained.py:259] Constructing Fbank computer
+2021-11-18 07:42:16,822 INFO [pretrained.py:269] Reading sound files: ['./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav']
+2021-11-18 07:42:16,826 INFO [pretrained.py:275] Decoding started
+2021-11-18 07:42:16,916 INFO [pretrained.py:321] Loading HLG from ./tmp/icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt
+2021-11-18 07:42:21,115 INFO [pretrained.py:345] Use HLG + attention decoder rescoring
+2021-11-18 07:42:21,888 INFO [pretrained.py:370]
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav:
+甚至 出现 交易 几乎 停止  情况
+
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav:
+一二 线 城市 虽然  处于 调整 
+
+./tmp/icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav:
+ 因为 聚集  过多 公共 资源
+
+
+2021-11-18 07:42:21,889 INFO [pretrained.py:372] Decoding Done

Colab notebook

+

We provide a Colab notebook for this recipe, showing how to use a pre-trained model.

+

aishell asr conformer ctc colab notebook

+
+

Hint

+

Due to limited memory provided by Colab, you have to upgrade to Colab Pro to run HLG decoding + attention decoder rescoring. Otherwise, you can only run HLG decoding with Colab.

+
+

Congratulations! You have finished the aishell ASR recipe with conformer CTC models in icefall.

+

If you want to deploy your trained model in C++, please read the following section.

+
+
+

Deployment with C++

+

This section describes how to deploy the pre-trained model in C++, without Python dependencies.

+
+

Hint

+

At present, it does NOT support streaming decoding.

+
+

First, let us compile k2 from source:

+
$ cd $HOME
+$ git clone https://github.com/k2-fsa/k2
+$ cd k2
+$ git checkout v2.0-pre
+
+
+
+

Caution

+

You have to switch to the branch v2.0-pre!

+
+
$ mkdir build-release
+$ cd build-release
+$ cmake -DCMAKE_BUILD_TYPE=Release ..
+$ make -j hlg_decode
+
+# You will find the generated binary in `./bin`, i.e., ./bin/hlg_decode
+
+
+

Now you are ready to go!

+

Assume you have run:

+
+
$ cd k2/build-release
+$ ln -s /path/to/icefall_asr_aishell_conformer_ctc ./
+
+
+
+

To view the usage of ./bin/hlg_decode, run:

+
$ ./bin/hlg_decode
+
+
+

It will show you the following message:

+
Please provide --nn_model
+
+This file implements decoding with an HLG decoding graph.
+
+Usage:
+  ./bin/hlg_decode \
+    --use_gpu true \
+    --nn_model <path to torch scripted pt file> \
+    --hlg <path to HLG.pt> \
+    --word_table <path to words.txt> \
+    <path to foo.wav> \
+    <path to bar.wav> \
+    <more waves if any>
+
+To see all possible options, use
+  ./bin/hlg_decode --help
+
+Caution:
+ - Only sound files (*.wav) with single channel are supported.
+ - It assumes the model is conformer_ctc/transformer.py from icefall.
+   If you use a different model, you have to change the code
+   related to `model.forward` in this file.
+
+
+
./bin/hlg_decode \
+  --use_gpu true \
+  --nn_model icefall_asr_aishell_conformer_ctc/exp/cpu_jit.pt \
+  --hlg icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt \
+  --word_table icefall_asr_aishell_conformer_ctc/data/lang_char/words.txt \
+  icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav \
+  icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav \
+  icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav
+
+
+

The output is:

+
2021-11-18 14:48:20.89 [I] k2/torch/bin/hlg_decode.cu:115:int main(int, char**) Device: cpu
+2021-11-18 14:48:20.89 [I] k2/torch/bin/hlg_decode.cu:124:int main(int, char**) Load wave files
+2021-11-18 14:48:20.97 [I] k2/torch/bin/hlg_decode.cu:131:int main(int, char**) Build Fbank computer
+2021-11-18 14:48:20.98 [I] k2/torch/bin/hlg_decode.cu:142:int main(int, char**) Compute features
+2021-11-18 14:48:20.115 [I] k2/torch/bin/hlg_decode.cu:150:int main(int, char**) Load neural network model
+2021-11-18 14:48:20.693 [I] k2/torch/bin/hlg_decode.cu:165:int main(int, char**) Compute nnet_output
+2021-11-18 14:48:23.182 [I] k2/torch/bin/hlg_decode.cu:180:int main(int, char**) Load icefall_asr_aishell_conformer_ctc/data/lang_char/HLG.pt
+2021-11-18 14:48:33.489 [I] k2/torch/bin/hlg_decode.cu:185:int main(int, char**) Decoding
+2021-11-18 14:48:45.217 [I] k2/torch/bin/hlg_decode.cu:216:int main(int, char**)
+Decoding result:
+
+icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0121.wav
+甚至 出现 交易 几乎 停止  情况
+
+icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0122.wav
+一二 线 城市 虽然  处于 调整 
+
+icefall_asr_aishell_conformer_ctc/test_waves/BAC009S0764W0123.wav
+ 因为 聚集  过多 公共 资源
+
+
+

There is a Colab notebook showing you how to run a torch scripted model in C++. Please see aishell asr conformer ctc torch script colab notebook.


aishell

+

Aishell is an open-source Chinese Mandarin speech corpus published by Beijing Shell Shell Technology Co., Ltd.

+

400 people from different accent areas in China were invited to participate in the recording, which was conducted in a quiet indoor environment using a high-fidelity microphone and downsampled to 16 kHz. The manual transcription accuracy is above 95%, achieved through professional speech annotation and strict quality inspection. The data is free for academic use. We hope to provide a moderate amount of data for new researchers in the field of speech recognition.

+

It can be downloaded from https://www.openslr.org/33/.


Stateless Transducer

+

This tutorial shows you how to do transducer training in icefall.

+
+

Hint

+

Instead of saying RNN-T or RNN transducer, we simply say transducer here. As you will see, there are no RNNs in the model.

+
+
+

Hint

+

We assume you have read the page Installation and have set up the environment for icefall.

+
+
+

Hint

+

We recommend you use a GPU or several GPUs to run this recipe.

+
+

In this tutorial, you will learn:

+
+
  1. What does the transducer model look like
  2. How to prepare data for training and decoding
  3. How to start the training, either with a single GPU or with multiple GPUs
  4. How to do decoding after training, with greedy search, beam search, and modified beam search
  5. How to use a pre-trained model provided by us to transcribe sound files
+
+
+

The Model

+

The transducer model consists of 3 parts:

+
    +
  • Encoder: It is a conformer encoder with the following parameters:

      • Number of heads: 8
      • Attention dim: 512
      • Number of layers: 12
      • Feedforward dim: 2048

  • Decoder: We use a stateless model consisting of:

      • An embedding layer with embedding dim 512
      • A Conv1d layer with a default kernel size of 2 (i.e., it sees 2
        symbols of left-context by default)

  • Joiner: It consists of an nn.Tanh() and an nn.Linear().
+
+

Caution

+

The decoder is stateless and very simple. It is borrowed from https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=9054419 (RNN-Transducer with Stateless Prediction Network).

+

We make one modification to it: we place a Conv1d layer right after the embedding layer.

+
+

When using Chinese characters as the modelling unit, with a vocabulary size of 4336 for this specific dataset, the model has 87939824 parameters, i.e., about 88 M.
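The description above maps almost directly to PyTorch. The following is a minimal sketch of the stateless decoder and the joiner; module and argument names are illustrative, not the exact code in icefall:

import torch
import torch.nn as nn

class StatelessDecoder(nn.Module):
    """Embedding + Conv1d; no recurrent state across emitted symbols."""
    def __init__(self, vocab_size=4336, embed_dim=512, context_size=2):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        # The Conv1d gives the decoder `context_size` symbols of left-context.
        self.conv = nn.Conv1d(embed_dim, embed_dim, kernel_size=context_size)

    def forward(self, y: torch.Tensor) -> torch.Tensor:
        # y: (batch, num_symbols) of token IDs
        emb = self.embedding(y).permute(0, 2, 1)   # (B, C, U)
        return self.conv(emb).permute(0, 2, 1)     # (B, U - context_size + 1, C)

class Joiner(nn.Module):
    """nn.Tanh followed by nn.Linear, as described above."""
    def __init__(self, dim=512, vocab_size=4336):
        super().__init__()
        self.output = nn.Linear(dim, vocab_size)

    def forward(self, enc: torch.Tensor, dec: torch.Tensor) -> torch.Tensor:
        return self.output(torch.tanh(enc + dec))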

+
+
+

The Loss

+

We are using https://github.com/csukuangfj/optimized_transducer to compute the transducer loss, which removes extra paddings in loss computation to save memory.
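The call site looks roughly like the sketch below. Argument names follow the project's README, but treat the exact tensor layout (in particular the 2-D, padding-free logits) as an assumption of this sketch and consult the README before copying:

import torch
import optimized_transducer

# Toy shapes: 2 utterances, 5 frames, 3 target symbols, vocab of 10.
N, T, U, V = 2, 5, 3, 10
# Assumption: logits are 2-D, one row per (t, u) pair of each utterance,
# concatenated over the batch -- this is how padding is avoided.
logits = torch.randn(N * T * (U + 1), V, requires_grad=True)
targets = torch.randint(1, V, (N, U), dtype=torch.int32)
logit_lengths = torch.tensor([T, T], dtype=torch.int32)
target_lengths = torch.tensor([U, U], dtype=torch.int32)

loss = optimized_transducer.transducer_loss(
    logits=logits,
    targets=targets,
    logit_lengths=logit_lengths,
    target_lengths=target_lengths,
    blank=0,
    reduction="mean",
    one_sym_per_frame=False,  # True selects the modified transducer loss
)
loss.backward()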

+
+

Hint

+

optimized_transducer implements the techniques proposed in Improving RNN Transducer Modeling for End-to-End Speech Recognition to save memory.

+

Furthermore, it supports modified transducer, limiting the maximum number of symbols that can be emitted per frame to 1, which simplifies the decoding process significantly. Also, experimental results show that it does not degrade the performance.

+

See https://github.com/csukuangfj/optimized_transducer#modified-transducer for what exactly modified transducer is.
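With that constraint, greedy search reduces to a single pass over the encoder output. A toy sketch, reusing the StatelessDecoder and Joiner modules sketched in The Model section above (real icefall code differs in details such as padding and batching):

import torch

def greedy_search_modified(decoder, joiner, encoder_out,
                           blank_id=0, context_size=2):
    # encoder_out: (T, C); at most one symbol is emitted per frame.
    hyp = [blank_id] * context_size         # initial decoder context
    for t in range(encoder_out.size(0)):
        context = torch.tensor([hyp[-context_size:]])
        dec_out = decoder(context)[:, -1, :]            # (1, C)
        logits = joiner(encoder_out[t:t + 1], dec_out)  # (1, vocab)
        token = logits.argmax(dim=-1).item()
        if token != blank_id:
            hyp.append(token)   # emit one symbol, then advance the frame
    return hyp[context_size:]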

+

https://github.com/csukuangfj/transducer-loss-benchmarking shows that, in the unpruned case, optimized_transducer has an advantage in minimizing memory usage.

+
+
+

Todo

+

Add a tutorial about pruned_transducer_stateless that uses the k2 pruned transducer loss.

+
+
+

Hint

+

You can use:

+
pip install optimized_transducer
+
+
+

to install optimized_transducer. Refer to https://github.com/csukuangfj/optimized_transducer for other alternatives.

+
+
+
+

Data Preparation

+

To prepare the data for training, please use the following commands:

+
cd egs/aishell/ASR
+./prepare.sh --stop-stage 4
+./prepare.sh --stage 6 --stop-stage 6
+
+
+
+

Note

+

You can use ./prepare.sh, though it will generate FSTs that are not used in transducer training.

+
+

When you finish running the script, you will get the following two folders:

+
+
    +
  • data/fbank: It saves the pre-computed features
  • data/lang_char: It contains tokens that will be used in the training
+
+
+
+

Training

+
cd egs/aishell/ASR
+./transducer_stateless_modified/train.py --help
+
+
+

shows you the training options that can be passed from the commandline. The following options are used quite often:

+
+
    +
  • --exp-dir

    +

    The experiment folder to save logs and model checkpoints; it defaults to ./transducer_stateless_modified/exp.

    +
  • +
  • --num-epochs

    +

    It is the number of epochs to train. For instance, ./transducer_stateless_modified/train.py --num-epochs 30 trains for 30 epochs and generates epoch-0.pt, epoch-1.pt, …, epoch-29.pt in the folder set by --exp-dir.

    +
  • +
  • --start-epoch

    +

    It’s used to resume training. ./transducer_stateless_modified/train.py --start-epoch 10 loads the checkpoint from exp_dir/epoch-9.pt and starts training from epoch 10, based on the state from epoch 9.

    +
  • +
  • --world-size

    +

    It is used for single-machine multi-GPU DDP training.

    +
    +
      • If it is 1, then no DDP training is used.
      • If it is 2, then GPU 0 and GPU 1 are used for DDP training.
    +
    +

    The following shows some use cases with it.

    +
    +

    Use case 1: You have 4 GPUs, but you only want to use GPU 0 and GPU 2 for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ export CUDA_VISIBLE_DEVICES="0,2"
    +$ ./transducer_stateless_modified/train.py --world-size 2
    +
    +
    +
    +

    Use case 2: You have 4 GPUs and you want to use all of them for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ ./transducer_stateless_modified/train.py --world-size 4
    +
    +
    +
    +

    Use case 3: You have 4 GPUs but you only want to use GPU 3 for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ export CUDA_VISIBLE_DEVICES="3"
    +$ ./transducer_stateless_modified/train.py --world-size 1
    +
    +
    +
    +
    +
    +

    Caution

    +

    Only single-machine multi-GPU DDP training is implemented at present. There is an ongoing PR https://github.com/k2-fsa/icefall/pull/63 that adds support for multi-machine multi-GPU DDP training.

    +
    +
  • +
  • --max-duration

    +

    It specifies the total number of seconds over all utterances in a batch before padding. If you encounter CUDA OOM, please reduce it. For instance, if you are using a V100 NVIDIA GPU with 32 GB RAM, we recommend setting it to 300 when the vocabulary size is 500.

    +
    +

    Hint

    +

    Due to padding, the number of seconds of all utterances in a batch will usually be larger than --max-duration.

    +

    A larger value for --max-duration may cause OOM during training, while a smaller value may increase the training time. You have to tune it.

    +
    +
  • +
  • --lr-factor

    +

    It controls the learning rate. If you use a single GPU for training, you may want to use a small value for it. If you use multiple GPUs for training, you may increase it.

    +
  • +
  • --context-size

    +

    It specifies the kernel size in the decoder. The default value 2 means it functions as a tri-gram LM.

    +
  • +
  • --modified-transducer-prob

    +

    It specifies the probability of using the modified transducer loss. If it is 0, then no modified transducer is used; if it is 1, then it uses the modified transducer loss for all batches. If it is p, it applies the modified transducer with probability p (see the sketch after this list).

    +
  • +
+
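In other words, the choice between the two losses is made independently per batch. A toy illustration of how such a probability is interpreted (this is an illustration, not the actual icefall code):

import random

def pick_loss_variant(modified_transducer_prob: float) -> str:
    # With probability p, use the modified loss (<= 1 symbol per frame).
    if random.random() < modified_transducer_prob:
        return "modified"
    return "standard"

counts = {"modified": 0, "standard": 0}
for _ in range(10000):
    counts[pick_loss_variant(0.25)] += 1
print(counts)  # roughly {'modified': 2500, 'standard': 7500}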
+

There are some training options, e.g., number of warmup steps, that are not passed from the commandline. They are pre-configured by the function get_params() in transducer_stateless_modified/train.py.

+

If you need to change them, please modify ./transducer_stateless_modified/train.py directly.

+
+

Caution

+

The training set is perturbed by speed with two factors: 0.9 and 1.1. Each epoch actually processes 3x150 == 450 hours of data.
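With lhotse, the perturbation amounts to something like the following sketch. Here `cuts` stands for the roughly 150-hour AISHELL training CutSet; `combine` and `CutSet.perturb_speed` are lhotse APIs, but verify them against your lhotse version:

from lhotse import combine

cuts_train = combine(cuts, cuts.perturb_speed(0.9), cuts.perturb_speed(1.1))
# ~150 h + ~167 h (0.9x is slower, hence longer) + ~136 h (1.1x) ~= 3 x 150 h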

+
+
+

Training logs

+

Training logs and checkpoints are saved in the folder set by --exp-dir (defaults to transducer_stateless_modified/exp). You will find the following files in that directory:

+
+
    +
  • epoch-0.pt, epoch-1.pt, …

    +

    These are checkpoint files, containing model state_dict and optimizer state_dict. To resume training from some checkpoint, say epoch-10.pt, you can use:

    +
    +
    $ ./transducer_stateless_modified/train.py --start-epoch 11
    +
    +
    +
    +
  • +
  • tensorboard/

    +

    This folder contains TensorBoard logs. Training loss, validation loss, learning rate, etc, are recorded in these logs. You can visualize them by:

    +
    +
    $ cd transducer_stateless_modified/exp/tensorboard
    +$ tensorboard dev upload --logdir . --name "Aishell transducer training with icefall" --description "Training modified transducer, see https://github.com/k2-fsa/icefall/pull/219"
    +
    +
    +
    +

    It will print something like below:

    +
    +
    TensorFlow installation not found - running with reduced feature set.
    +Upload started and will continue reading any new data as it's added to the logdir.
    +
    +To stop uploading, press Ctrl-C.
    +
    +New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/laGZ6HrcQxOigbFD5E0Y3Q/
    +
    +[2022-03-03T14:29:45] Started scanning logdir.
    +[2022-03-03T14:29:48] Total uploaded: 8477 scalars, 0 tensors, 0 binary objects
    +Listening for new data in logdir...
    +
    +
    +
    +

    Note there is a URL in the above output; click it and you will see the following screenshot:

    +
    +
    +

    Fig. 3 TensorBoard screenshot.

    +
    +
    +
    +
  • +
  • log/log-train-xxxx

    +

    It is the detailed training log in text format, the same as the one you saw printed to the console during training.

    +
  • +
+
+
+
+

Usage examples

+

The following shows typical use cases:

+
+

Case 1

+
$ cd egs/aishell/ASR
+$ ./transducer_stateless_modified/train.py --max-duration 250
+
+
+

It uses --max-duration of 250 to avoid OOM.

+
+
+

Case 2

+
$ cd egs/aishell/ASR
+$ export CUDA_VISIBLE_DEVICES="0,3"
+$ ./transducer_stateless_modified/train.py --world-size 2
+
+
+

It uses GPU 0 and GPU 3 for DDP training.

+
+
+

Case 3

+
$ cd egs/aishell/ASR
+$ ./transducer_stateless_modified/train.py --num-epochs 10 --start-epoch 3
+
+
+

It loads checkpoint ./transducer_stateless_modified/exp/epoch-2.pt and starts training from epoch 3. Also, it trains for 10 epochs.

+
+
+
+
+

Decoding

+

The decoding part uses checkpoints saved by the training part, so you have to run the training part first.

+
$ cd egs/aishell/ASR
+$ ./transducer_stateless_modified/decode.py --help
+
+
+

shows the options for decoding.

+

The commonly used options are:

+
+
    +
  • --method

    +

    This specifies the decoding method. Currently, it supports:

    +
    +
      +
    • greedy_search. You can provide the commandline option --max-sym-per-frame to limit the maximum number of symbols that can be emitted per frame.

    • +
    • beam_search. You can provide the commandline option --beam-size.

    • +
    • modified_beam_search. You can also provide the commandline option --beam-size. To use this method, we assume that you have trained your model with modified transducer, i.e., used the option --modified-transducer-prob in the training.

    • +
    +
    +

    The following command uses greedy search for decoding:

    +
    $ cd egs/aishell/ASR
    +$ ./transducer_stateless_modified/decode.py \
    +        --epoch 64 \
    +        --avg 33 \
    +        --exp-dir ./transducer_stateless_modified/exp \
    +        --max-duration 100 \
    +        --decoding-method greedy_search \
    +        --max-sym-per-frame 1
    +
    +
    +

    The following command uses beam search for decoding:

    +
    $ cd egs/aishell/ASR
    +$ ./transducer_stateless_modified/decode.py \
    +        --epoch 64 \
    +        --avg 33 \
    +        --exp-dir ./transducer_stateless_modified/exp \
    +        --max-duration 100 \
    +        --decoding-method beam_search \
    +        --beam-size 4
    +
    +
    +

    The following command uses modified beam search for decoding:

    +
    $ cd egs/aishell/ASR
    +$ ./transducer_stateless_modified/decode.py \
    +        --epoch 64 \
    +        --avg 33 \
    +        --exp-dir ./transducer_stateless_modified/exp \
    +        --max-duration 100 \
    +        --decoding-method modified_beam_search \
    +        --beam-size 4
    +
    +
    +
  • +
  • --max-duration

    +

    It has the same meaning as the one used in training. A larger value may cause OOM.

    +
  • +
  • --epoch

    +

    It specifies the epoch whose checkpoint should be used for decoding.

    +
  • +
  • --avg

    +

    It specifies the number of models to average. For instance, if it is 3 and if --epoch=10, then it averages the checkpoints epoch-8.pt, epoch-9.pt, and epoch-10.pt, and the averaged checkpoint is used for decoding (a sketch of the averaging is shown after this list).

    +
  • +
+
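Conceptually, the averaging looks like the sketch below. It is a rough sketch, not icefall's actual implementation, and it assumes each checkpoint stores the model weights under a "model" key:

import torch

def average_checkpoints(filenames):
    # Assumption: each checkpoint is a dict with the weights under "model".
    avg = torch.load(filenames[0], map_location="cpu")["model"]
    for f in filenames[1:]:
        state = torch.load(f, map_location="cpu")["model"]
        for k in avg:
            avg[k] += state[k]
    n = len(filenames)
    for k in avg:
        if avg[k].is_floating_point():
            avg[k] /= n
        else:
            avg[k] //= n
    return avg

# --epoch 10 --avg 3 corresponds to:
files = [f"exp/epoch-{e}.pt" for e in (8, 9, 10)]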
+

After decoding, you can find the decoding logs and results in exp_dir/log/<decoding_method>, e.g., exp_dir/log/greedy_search.

+
+
+

Pre-trained Model

+

We have uploaded a pre-trained model to https://huggingface.co/csukuangfj/icefall-aishell-transducer-stateless-modified-2022-03-01

+

We describe how to use the pre-trained model to transcribe a sound file or multiple sound files in the following.

+
+

Install kaldifeat

+

kaldifeat is used to extract features for a single sound file or multiple sound files at the same time.

+

Please refer to https://github.com/csukuangfj/kaldifeat for installation.

+
+
+

Download the pre-trained model

+

The following commands describe how to download the pre-trained model:

+
$ cd egs/aishell/ASR
+$ mkdir tmp
+$ cd tmp
+$ git lfs install
+$ git clone https://huggingface.co/csukuangfj/icefall-aishell-transducer-stateless-modified-2022-03-01
+
+
+
+

Caution

+

You have to use git lfs to download the pre-trained model.

+
+

After downloading, you will have the following files:

+
$ cd egs/aishell/ASR
+$ tree tmp/icefall-aishell-transducer-stateless-modified-2022-03-01
+
+
+
tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/
+|-- README.md
+|-- data
+|   `-- lang_char
+|       |-- L.pt
+|       |-- lexicon.txt
+|       |-- tokens.txt
+|       `-- words.txt
+|-- exp
+|   `-- pretrained.pt
+|-- log
+|   |-- errs-test-beam_4-epoch-64-avg-33-beam-4.txt
+|   |-- errs-test-greedy_search-epoch-64-avg-33-context-2-max-sym-per-frame-1.txt
+|   |-- log-decode-epoch-64-avg-33-beam-4-2022-03-02-12-05-03
+|   |-- log-decode-epoch-64-avg-33-context-2-max-sym-per-frame-1-2022-02-28-18-13-07
+|   |-- recogs-test-beam_4-epoch-64-avg-33-beam-4.txt
+|   `-- recogs-test-greedy_search-epoch-64-avg-33-context-2-max-sym-per-frame-1.txt
+`-- test_wavs
+    |-- BAC009S0764W0121.wav
+    |-- BAC009S0764W0122.wav
+    |-- BAC009S0764W0123.wav
+    `-- transcript.txt
+
+5 directories, 16 files
+
+
+

File descriptions:

+
+
    +
  • data/lang_char

    It contains language-related files. You can find the vocabulary size in tokens.txt.

  • exp/pretrained.pt

    It contains pre-trained model parameters, obtained by averaging
    checkpoints from epoch-32.pt to epoch-64.pt.
    Note: We have removed optimizer state_dict to reduce file size.

  • log

    It contains decoding logs and decoded results.

  • test_wavs

    It contains some test sound files from the Aishell test dataset.
+
+

The information of the test sound files is listed below:

+
$ soxi tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/*.wav
+
+Input File     : 'tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0121.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.20 = 67263 samples ~ 315.295 CDDA sectors
+File Size      : 135k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0122.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.12 = 65840 samples ~ 308.625 CDDA sectors
+File Size      : 132k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'tmp/icefall-aishell-transducer-stateless-modified-2022-03-01/test_wavs/BAC009S0764W0123.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.00 = 64000 samples ~ 300 CDDA sectors
+File Size      : 128k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+Total Duration of 3 files: 00:00:12.32
+
+
+
+
+

Usage

+
$ cd egs/aishell/ASR
+$ ./transducer_stateless_modified/pretrained.py --help
+
+
+

displays the help information.

+

It supports three decoding methods:

+
+
    +
  • greedy search
  • beam search
  • modified beam search
+
+
+

Note

+

In modified beam search, it limits the maximum number of symbols that can be emitted per frame to 1. To use this method, you have to ensure that your model has been trained with the option --modified-transducer-prob. Otherwise, it may give you poor results.
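The per-frame structure this buys is easy to see in a simplified sketch. Real code must also merge hypotheses with identical token sequences and recompute decoder states; this toy step ignores that:

import torch

def modified_beam_search_step(hyps, joiner_log_probs, beam_size=4, blank_id=0):
    """One frame of (simplified) modified beam search.

    hyps: list of (tokens, score) pairs, all aligned to the same frame.
    joiner_log_probs: (len(hyps), vocab_size) log-probs for this frame.
    """
    candidates = []
    for (tokens, score), frame_log_probs in zip(hyps, joiner_log_probs):
        topk = frame_log_probs.topk(beam_size)
        for lp, tok in zip(topk.values.tolist(), topk.indices.tolist()):
            # Emitting blank keeps the hypothesis; a non-blank token
            # extends it. Either way we advance exactly one frame.
            new_tokens = tokens if tok == blank_id else tokens + [tok]
            candidates.append((new_tokens, score + lp))
    candidates.sort(key=lambda c: c[1], reverse=True)
    return candidates[:beam_size]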

+

Colab notebook

+

We provide a Colab notebook for this recipe showing how to use a pre-trained model to transcribe sound files.

+

aishell asr stateless modified transducer colab notebook

+
+

TDNN-LSTM CTC

+

This tutorial shows you how to run a TDNN-LSTM CTC model with the Aishell dataset.

+
+

Hint

+

We assume you have read the page Installation and have set up the environment for icefall.

+
+
+

Hint

+

We recommend you use a GPU or several GPUs to run this recipe.

+
+

In this tutorial, you will learn:

+
+
  1. How to prepare data for training and decoding
  2. How to start the training, either with a single GPU or multiple GPUs
  3. How to do decoding after training
  4. How to use a pre-trained model provided by us
+
+
+

Data preparation

+
$ cd egs/aishell/ASR
+$ ./prepare.sh
+
+
+

The script ./prepare.sh handles the data preparation for you, automagically. All you need to do is run it.

+

The data preparation contains several stages. You can use the following two options:

+
+
    +
  • --stage
  • --stop-stage
+
+

to control which stage(s) should be run. By default, all stages are executed.

+

For example,

+
$ cd egs/aishell/ASR
+$ ./prepare.sh --stage 0 --stop-stage 0
+
+
+

means to run only stage 0.

+

To run stage 2 to stage 5, use:

+
$ ./prepare.sh --stage 2 --stop-stage 5
+
+
+
+

Hint

+

If you have pre-downloaded the Aishell dataset and the musan dataset, say, they are saved in /tmp/aishell and /tmp/musan, you can modify the dl_dir variable in ./prepare.sh to point to /tmp so that ./prepare.sh won’t re-download them.

+
+
+

Hint

+

A 3-gram language model will be downloaded from HuggingFace. We assume you have installed and initialized git-lfs. If not, you can install git-lfs by:

+
$ sudo apt-get install git-lfs
+$ git-lfs install
+
+
+

If you don’t have sudo permission, you can download the git-lfs binary here, then add it to your PATH.

+
+
+

Note

+

All files generated by ./prepare.sh, e.g., features, lexicon, etc., are saved in the ./data directory.

+
+
+
+

Training

+
+

Configurable options

+
$ cd egs/aishell/ASR
+$ ./tdnn_lstm_ctc/train.py --help
+
+
+

shows you the training options that can be passed from the commandline. The following options are used quite often:

+
+
    +
  • --num-epochs

    +

    It is the number of epochs to train. For instance, ./tdnn_lstm_ctc/train.py --num-epochs 30 trains for 30 epochs and generates epoch-0.pt, epoch-1.pt, …, epoch-29.pt in the folder ./tdnn_lstm_ctc/exp.

    +
  • +
  • --start-epoch

    +

    It’s used to resume training. ./tdnn_lstm_ctc/train.py --start-epoch 10 loads the checkpoint ./tdnn_lstm_ctc/exp/epoch-9.pt and starts training from epoch 10, based on the state from epoch 9.

    +
  • +
  • --world-size

    +

    It is used for multi-GPU single-machine DDP training.

    +
    +
      • If it is 1, then no DDP training is used.
      • If it is 2, then GPU 0 and GPU 1 are used for DDP training.
    +
    +

    The following shows some use cases with it.

    +
    +

    Use case 1: You have 4 GPUs, but you only want to use GPU 0 and GPU 2 for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ export CUDA_VISIBLE_DEVICES="0,2"
    +$ ./tdnn_lstm_ctc/train.py --world-size 2
    +
    +
    +
    +

    Use case 2: You have 4 GPUs and you want to use all of them for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ ./tdnn_lstm_ctc/train.py --world-size 4
    +
    +
    +
    +

    Use case 3: You have 4 GPUs but you only want to use GPU 3 for training. You can do the following:

    +
    +
    $ cd egs/aishell/ASR
    +$ export CUDA_VISIBLE_DEVICES="3"
    +$ ./tdnn_lstm_ctc/train.py --world-size 1
    +
    +
    +
    +
    +
    +

    Caution

    +

    Only multi-GPU single-machine DDP training is implemented at present. Multi-GPU multi-machine DDP training will be added later.

    +
    +
  • +
  • --max-duration

    +

    It specifies the total number of seconds over all utterances in a batch, before padding. If you encounter CUDA OOM, please reduce it. For instance, if you are using a V100 NVIDIA GPU, we recommend setting it to 2000.

    +
    +

    Hint

    +

    Due to padding, the number of seconds of all utterances in a batch will usually be larger than --max-duration.

    +

    A larger value for --max-duration may cause OOM during training, while a smaller value may increase the training time. You have to tune it.

    +
    +
  • +
+
+
+
+

Pre-configured options

+

There are some training options, e.g., weight decay, number of warmup steps, results dir, etc, that are not passed from the commandline. They are pre-configured by the function get_params() in tdnn_lstm_ctc/train.py.

+

You don’t need to change these pre-configured parameters. If you really need to change them, please modify ./tdnn_lstm_ctc/train.py directly.

+
+

Caution

+

The training set is perturbed by speed with two factors: 0.9 and 1.1. Each epoch actually processes 3x150 == 450 hours of data.

+
+
+
+

Training logs

+

Training logs and checkpoints are saved in tdnn_lstm_ctc/exp. You will find the following files in that directory:

+
+
    +
  • epoch-0.pt, epoch-1.pt, …

    +

    These are checkpoint files, containing model state_dict and optimizer state_dict. To resume training from some checkpoint, say epoch-10.pt, you can use:

    +
    +
    $ ./tdnn_lstm_ctc/train.py --start-epoch 11
    +
    +
    +
    +
  • +
  • tensorboard/

    +

    This folder contains TensorBoard logs. Training loss, validation loss, learning rate, etc, are recorded in these logs. You can visualize them by:

    +
    +
    $ cd tdnn_lstm_ctc/exp/tensorboard
    +$ tensorboard dev upload --logdir . --description "TDNN-LSTM CTC training for Aishell with icefall"
    +
    +
    +
    +

    It will print something like below:

    +
    +
    TensorFlow installation not found - running with reduced feature set.
    +Upload started and will continue reading any new data as it's added to the logdir.
    +
    +To stop uploading, press Ctrl-C.
    +
    +New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/LJI9MWUORLOw3jkdhxwk8A/
    +
    +[2021-09-13T11:59:23] Started scanning logdir.
    +[2021-09-13T11:59:24] Total uploaded: 4454 scalars, 0 tensors, 0 binary objects
    +Listening for new data in logdir...
    +
    +
    +
    +

    Note there is a URL in the above output; click it and you will see the following screenshot:

    +
    +
    +

    Fig. 1 TensorBoard screenshot.

    +
    +
    +
    +
  • +
  • log/log-train-xxxx

    +

    It is the detailed training log in text format, the same as the one you saw printed to the console during training.

    +
  • +
+
+
+
+

Usage examples

+

The following shows typical use cases:

+
+

Case 1

+
$ cd egs/aishell/ASR
+$ export CUDA_VISIBLE_DEVICES="0,3"
+$ ./tdnn_lstm_ctc/train.py --world-size 2
+
+
+

It uses GPU 0 and GPU 3 for DDP training.

+
+
+

Case 2

+
$ cd egs/aishell/ASR
+$ ./tdnn_lstm_ctc/train.py --num-epochs 10 --start-epoch 3
+
+
+

It loads checkpoint ./tdnn_lstm_ctc/exp/epoch-2.pt and starts training from epoch 3. Also, it trains for 10 epochs.

+
+
+
+
+

Decoding

+

The decoding part uses checkpoints saved by the training part, so you have to run the training part first.

+
$ cd egs/aishell/ASR
+$ ./tdnn_lstm_ctc/decode.py --help
+
+
+

shows the options for decoding.

+

The commonly used options are:

+
+
    +
  • --method

    This specifies the decoding method.

    The following command uses 1best decoding:

    $ cd egs/aishell/ASR
    $ ./tdnn_lstm_ctc/decode.py --method 1best --max-duration 100

  • --max-duration

    It has the same meaning as the one during training. A larger
    value may cause OOM.
+
+
+
+

Pre-trained Model

+

We have uploaded a pre-trained model to https://huggingface.co/pkufool/icefall_asr_aishell_tdnn_lstm_ctc.

+

We describe how to use the pre-trained model to transcribe a sound file or multiple sound files in the following.

+
+

Install kaldifeat

+

kaldifeat is used to extract features for a single sound file or multiple sound files at the same time.

+

Please refer to https://github.com/csukuangfj/kaldifeat for installation.

+
+
+

Download the pre-trained model

+

The following commands describe how to download the pre-trained model:

+
$ cd egs/aishell/ASR
+$ mkdir tmp
+$ cd tmp
+$ git lfs install
+$ git clone https://huggingface.co/pkufool/icefall_asr_aishell_tdnn_lstm_ctc
+
+
+
+

Caution

+

You have to use git lfs to download the pre-trained model.

+
+
+

Caution

+

In order to use this pre-trained model, your k2 version has to be v1.7 or later.

+
+

After downloading, you will have the following files:

+
$ cd egs/aishell/ASR
+$ tree tmp
+
+
+
tmp/
+`-- icefall_asr_aishell_tdnn_lstm_ctc
+    |-- README.md
+    |-- data
+    |   `-- lang_phone
+    |       |-- HLG.pt
+    |       |-- tokens.txt
+    |       `-- words.txt
+    |-- exp
+    |   `-- pretrained.pt
+    `-- test_waves
+        |-- BAC009S0764W0121.wav
+        |-- BAC009S0764W0122.wav
+        |-- BAC009S0764W0123.wav
+        `-- trans.txt
+
+5 directories, 9 files
+
+
+

File descriptions:

+
+
    +
  • data/lang_phone/HLG.pt

    It is the decoding graph.

  • data/lang_phone/tokens.txt

    It contains tokens and their IDs.
    Provided only for convenience so that you can look up the SOS/EOS ID easily.

  • data/lang_phone/words.txt

    It contains words and their IDs.

  • exp/pretrained.pt

    It contains pre-trained model parameters, obtained by averaging
    checkpoints from epoch-18.pt to epoch-40.pt.
    Note: We have removed optimizer state_dict to reduce file size.

  • test_waves/*.wav

    It contains some test sound files from the Aishell test dataset.

  • test_waves/trans.txt

    It contains the reference transcripts for the sound files in test_waves/.
+
+

The information of the test sound files is listed below:

+
$ soxi tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/*.wav
+
+Input File     : 'tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.20 = 67263 samples ~ 315.295 CDDA sectors
+File Size      : 135k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.12 = 65840 samples ~ 308.625 CDDA sectors
+File Size      : 132k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.00 = 64000 samples ~ 300 CDDA sectors
+File Size      : 128k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+Total Duration of 3 files: 00:00:12.32
+
+
+
+
+

Usage

+
$ cd egs/aishell/ASR
+$ ./tdnn_lstm_ctc/pretrained.py --help
+
+
+

displays the help information.

+
+

HLG decoding

+

HLG decoding uses the best path of the decoding lattice as the decoding result.
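In k2 terms, the best path is a shortest-path computation over the decoding lattice. A rough sketch, assuming `lattice` is the decoding lattice obtained by intersecting the HLG graph with the network output:

import k2

# use_double_scores=True accumulates path scores in float64,
# which is more accurate for long utterances.
best_path = k2.shortest_path(lattice, use_double_scores=True)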

+

The command to run HLG decoding is:

+
$ cd egs/aishell/ASR
+$ ./tdnn_lstm_ctc/pretrained.py \
+  --checkpoint ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/exp/pretrained.pt \
+  --words-file ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/words.txt \
+  --HLG ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/HLG.pt \
+  --method 1best \
+  ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav \
+  ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav \
+  ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav
+
+
+

The output is given below:

+
2021-09-13 15:00:55,858 INFO [pretrained.py:140] device: cuda:0
+2021-09-13 15:00:55,858 INFO [pretrained.py:142] Creating model
+2021-09-13 15:01:05,389 INFO [pretrained.py:154] Loading HLG from ./tmp/icefall_asr_aishell_tdnn_lstm_ctc/data/lang_phone/HLG.pt
+2021-09-13 15:01:06,531 INFO [pretrained.py:161] Constructing Fbank computer
+2021-09-13 15:01:06,536 INFO [pretrained.py:171] Reading sound files: ['./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav', './tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav', './tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav']
+2021-09-13 15:01:06,539 INFO [pretrained.py:177] Decoding started
+2021-09-13 15:01:06,917 INFO [pretrained.py:207] Use HLG decoding
+2021-09-13 15:01:07,129 INFO [pretrained.py:220]
+./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0121.wav:
+甚至 出现 交易 几乎 停滞  情况
+
+./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0122.wav:
+一二 线 城市 虽然  处于 调整 
+
+./tmp/icefall_asr_aishell_tdnn_lstm_ctc/test_waves/BAC009S0764W0123.wav:
+ 因为 聚集  过多 公共 资源
+
+
+2021-09-13 15:01:07,129 INFO [pretrained.py:222] Decoding Done
+

Colab notebook

+

We provide a Colab notebook for this recipe, showing how to use a pre-trained model.

+

aishell asr tdnn-lstm ctc colab notebook

+

Congratulations! You have finished the aishell ASR recipe with TDNN-LSTM CTC models in icefall.

+
+

Recipes

+

This page contains various recipes in icefall. Currently, only speech recognition recipes are provided.

+

We may add recipes for other tasks as well in the future.


Conformer CTC

+

This tutorial shows you how to run a Conformer CTC model with the LibriSpeech dataset.

+
+

Hint

+

We assume you have read the page Installation and have set up the environment for icefall.

+
+
+

Hint

+

We recommend you use a GPU or several GPUs to run this recipe.

+
+

In this tutorial, you will learn:

+
+
  1. How to prepare data for training and decoding
  2. How to start the training, either with a single GPU or multiple GPUs
  3. How to do decoding after training, with n-gram LM rescoring and attention decoder rescoring
  4. How to use a pre-trained model, provided by us
  5. How to deploy your trained model in C++, without Python dependencies
+
+
+

Data preparation

+
$ cd egs/librispeech/ASR
+$ ./prepare.sh
+
+
+

The script ./prepare.sh handles the data preparation for you, automagically. All you need to do is run it.

+

The data preparation contains several stages. You can use the following two options:

+
+
    +
  • --stage
  • --stop-stage
+
+

to control which stage(s) should be run. By default, all stages are executed.

+

For example,

+
$ cd egs/librispeech/ASR
+$ ./prepare.sh --stage 0 --stop-stage 0
+
+
+

means to run only stage 0.

+

To run stage 2 to stage 5, use:

+
$ ./prepare.sh --stage 2 --stop-stage 5
+
+
+
+

Hint

+

If you have pre-downloaded the LibriSpeech dataset and the musan dataset, say, they are saved in /tmp/LibriSpeech and /tmp/musan, you can modify the dl_dir variable in ./prepare.sh to point to /tmp so that ./prepare.sh won’t re-download them.

+
+
+

Note

+

All files generated by ./prepare.sh, e.g., features, lexicon, etc., are saved in the ./data directory.

+
+

We provide the following YouTube video showing how to run ./prepare.sh.

+
+

Note

+

To get the latest news about next-gen Kaldi, please subscribe to the following YouTube channel by Nadira Povey:

+
+
+
+
+
+
+

Training

+
+

Configurable options

+
$ cd egs/librispeech/ASR
+$ ./conformer_ctc/train.py --help
+
+
+

shows you the training options that can be passed from the commandline. The following options are used quite often:

+
+
    +
  • --full-libri

    +

    If it’s True, the training part uses all the training data, i.e., 960 hours. Otherwise, the training part uses only the subset train-clean-100, which has 100 hours of training data.

    +
    +

    Caution

    +

    The training set is perturbed by speed with two factors: 0.9 and 1.1. If --full-libri is True, each epoch actually processes 3x960 == 2880 hours of data.

    +
    +
  • +
  • --num-epochs

    +

    It is the number of epochs to train. For instance, ./conformer_ctc/train.py --num-epochs 30 trains for 30 epochs and generates epoch-0.pt, epoch-1.pt, …, epoch-29.pt in the folder ./conformer_ctc/exp.

    +
  • +
  • --start-epoch

    +

    It’s used to resume training. ./conformer_ctc/train.py --start-epoch 10 loads the checkpoint ./conformer_ctc/exp/epoch-9.pt and starts training from epoch 10, based on the state from epoch 9.

    +
  • +
  • --world-size

    +

    It is used for multi-GPU single-machine DDP training.

    +
    +
      • If it is 1, then no DDP training is used.
      • If it is 2, then GPU 0 and GPU 1 are used for DDP training.
    +
    +

    The following shows some use cases with it.

    +
    +

    Use case 1: You have 4 GPUs, but you only want to use GPU 0 and GPU 2 for training. You can do the following:

    +
    +
    $ cd egs/librispeech/ASR
    +$ export CUDA_VISIBLE_DEVICES="0,2"
    +$ ./conformer_ctc/train.py --world-size 2
    +
    +
    +
    +

    Use case 2: You have 4 GPUs and you want to use all of them for training. You can do the following:

    +
    +
    $ cd egs/librispeech/ASR
    +$ ./conformer_ctc/train.py --world-size 4
    +
    +
    +
    +

    Use case 3: You have 4 GPUs but you only want to use GPU 3 for training. You can do the following:

    +
    +
    $ cd egs/librispeech/ASR
    +$ export CUDA_VISIBLE_DEVICES="3"
    +$ ./conformer_ctc/train.py --world-size 1
    +
    +
    +
    +
    +
    +

    Caution

    +

    Only multi-GPU single-machine DDP training is implemented at present. Multi-GPU multi-machine DDP training will be added later.

    +
    +
  • +
  • --max-duration

    +

    It specifies the total number of seconds over all utterances in a batch, before padding. If you encounter CUDA OOM, please reduce it. For instance, if you are using a V100 NVIDIA GPU, we recommend setting it to 200.

    +
    +

    Hint

    +

    Due to padding, the number of seconds of all utterances in a batch will usually be larger than --max-duration.

    +

    A larger value for --max-duration may cause OOM during training, while a smaller value may increase the training time. You have to tune it.

    +
    +
  • +
+
+
+
+

Pre-configured options

+

There are some training options, e.g., weight decay, number of warmup steps, results dir, etc, that are not passed from the commandline. They are pre-configured by the function get_params() in conformer_ctc/train.py.

+

You don’t need to change these pre-configured parameters. If you really need to change them, please modify ./conformer_ctc/train.py directly.

+
+
+

Training logs

+

Training logs and checkpoints are saved in conformer_ctc/exp. You will find the following files in that directory:

+
+
    +
  • epoch-0.pt, epoch-1.pt, …

    +

    These are checkpoint files, containing model state_dict and optimizer state_dict. To resume training from some checkpoint, say epoch-10.pt, you can use:

    +
    +
    $ ./conformer_ctc/train.py --start-epoch 11
    +
    +
    +
    +
  • +
  • tensorboard/

    +

    This folder contains TensorBoard logs. Training loss, validation loss, learning rate, etc, are recorded in these logs. You can visualize them by:

    +
    +
    $ cd conformer_ctc/exp/tensorboard
    +$ tensorboard dev upload --logdir . --description "Conformer CTC training for LibriSpeech with icefall"
    +
    +
    +
    +

    It will print something like below:

    +
    +
    TensorFlow installation not found - running with reduced feature set.
    +Upload started and will continue reading any new data as it's added to the logdir.
    +
    +To stop uploading, press Ctrl-C.
    +
    +New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/lzGnETjwRxC3yghNMd4kPw/
    +
    +[2021-08-24T16:42:43] Started scanning logdir.
    +Uploading 4540 scalars...
    +
    +
    +
    +

    Note there is a URL in the above output; click it and you will see the following screenshot:

    +
    +
    +

    Fig. 4 TensorBoard screenshot.

    +
    +
    +
    +
  • +
  • log/log-train-xxxx

    +

    It is the detailed training log in text format, the same as the one you saw printed to the console during training.

    +
  • +
+
+
+
+

Usage examples

+

The following shows typical use cases:

+
+

Case 1

+
$ cd egs/librispeech/ASR
+$ ./conformer_ctc/train.py --max-duration 200 --full-libri 0
+
+
+

It uses --max-duration of 200 to avoid OOM. Also, it uses only a subset of the LibriSpeech data for training.

+
+
+

Case 2

+
$ cd egs/librispeech/ASR
+$ export CUDA_VISIBLE_DEVICES="0,3"
+$ ./conformer_ctc/train.py --world-size 2
+
+
+

It uses GPU 0 and GPU 3 for DDP training.

+
+
+

Case 3

+
$ cd egs/librispeech/ASR
+$ ./conformer_ctc/train.py --num-epochs 10 --start-epoch 3
+
+
+

It loads checkpoint ./conformer_ctc/exp/epoch-2.pt and starts training from epoch 3. Also, it trains for 10 epochs.

+
+
+
+
+

Decoding

+

The decoding part uses checkpoints saved by the training part, so you have to run the training part first.

+
$ cd egs/librispeech/ASR
+$ ./conformer_ctc/decode.py --help
+
+
+

shows the options for decoding.

+

The commonly used options are:

+
+
    +
  • --method

    +

    This specifies the decoding method. This script supports 7 decoding methods. For ctc-decoding, it uses a sentencepiece model to convert word pieces to words (see the sketch after this list), and it needs neither a lexicon nor an n-gram LM.

    +

    For example, the following command uses CTC topology for decoding:

    +
    $ cd egs/librispeech/ASR
    +$ ./conformer_ctc/decode.py --method ctc-decoding --max-duration 300
    +# Caution: The above command is tested with a model with vocab size 500.
    +
    +
    +

    And the following command uses attention decoder for rescoring:

    +
    $ cd egs/librispeech/ASR
    +$ ./conformer_ctc/decode.py --method attention-decoder --max-duration 30 --nbest-scale 0.5
    +
    +
    +
  • +
  • --nbest-scale

    +

    It is used to scale down lattice scores so that there are more unique paths for rescoring.

    +
  • +
  • --max-duration

    +

    It has the same meaning as the one during training. A larger value may cause OOM.

    +
  • +
+
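For instance, mapping word pieces back to words with sentencepiece looks like this. The bpe.model path refers to the pre-trained directory described below, and the piece strings are made up for illustration:

import sentencepiece as spm

sp = spm.SentencePieceProcessor()
sp.load("data/lang_bpe_500/bpe.model")

pieces = ["▁HELLO", "▁WOR", "LD"]   # illustrative word pieces
print(sp.decode_pieces(pieces))     # -> "HELLO WORLD"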
+

Here are some results for CTC decoding with a vocab size of 500:

+

Usage:

+
$ cd egs/librispeech/ASR
+# NOTE: Tested with a model with vocab size 500.
+# It won't work for a model with vocab size 5000.
+$ ./conformer_ctc/decode.py \
+    --epoch 25 \
+    --avg 1 \
+    --max-duration 300 \
+    --exp-dir conformer_ctc/exp \
+    --lang-dir data/lang_bpe_500 \
+    --method ctc-decoding
+
+
+

The output is given below:

+
2021-09-26 12:44:31,033 INFO [decode.py:537] Decoding started
+2021-09-26 12:44:31,033 INFO [decode.py:538]
+{'lm_dir': PosixPath('data/lm'), 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True,
+'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 6, 'search_beam': 20, 'output_beam': 8,
+'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True,
+'epoch': 25, 'avg': 1, 'method': 'ctc-decoding', 'num_paths': 100, 'nbest_scale': 0.5,
+'export': False, 'exp_dir': PosixPath('conformer_ctc/exp'), 'lang_dir': PosixPath('data/lang_bpe_500'), 'full_libri': False,
+'feature_dir': PosixPath('data/fbank'), 'max_duration': 100, 'bucketing_sampler': False, 'num_buckets': 30,
+'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False,
+'shuffle': True, 'return_cuts': True, 'num_workers': 2}
+2021-09-26 12:44:31,406 INFO [lexicon.py:113] Loading pre-compiled data/lang_bpe_500/Linv.pt
+2021-09-26 12:44:31,464 INFO [decode.py:548] device: cuda:0
+2021-09-26 12:44:36,171 INFO [checkpoint.py:92] Loading checkpoint from conformer_ctc/exp/epoch-25.pt
+2021-09-26 12:44:36,776 INFO [decode.py:652] Number of model parameters: 109226120
+2021-09-26 12:44:37,714 INFO [decode.py:473] batch 0/206, cuts processed until now is 12
+2021-09-26 12:45:15,944 INFO [decode.py:473] batch 100/206, cuts processed until now is 1328
+2021-09-26 12:45:54,443 INFO [decode.py:473] batch 200/206, cuts processed until now is 2563
+2021-09-26 12:45:56,411 INFO [decode.py:494] The transcripts are stored in conformer_ctc/exp/recogs-test-clean-ctc-decoding.txt
+2021-09-26 12:45:56,592 INFO [utils.py:331] [test-clean-ctc-decoding] %WER 3.26% [1715 / 52576, 163 ins, 128 del, 1424 sub ]
+2021-09-26 12:45:56,807 INFO [decode.py:506] Wrote detailed error stats to conformer_ctc/exp/errs-test-clean-ctc-decoding.txt
+2021-09-26 12:45:56,808 INFO [decode.py:522]
+For test-clean, WER of different settings are:
+ctc-decoding    3.26    best for test-clean
+
+2021-09-26 12:45:57,362 INFO [decode.py:473] batch 0/203, cuts processed until now is 15
+2021-09-26 12:46:35,565 INFO [decode.py:473] batch 100/203, cuts processed until now is 1477
+2021-09-26 12:47:15,106 INFO [decode.py:473] batch 200/203, cuts processed until now is 2922
+2021-09-26 12:47:16,131 INFO [decode.py:494] The transcripts are stored in conformer_ctc/exp/recogs-test-other-ctc-decoding.txt
+2021-09-26 12:47:16,208 INFO [utils.py:331] [test-other-ctc-decoding] %WER 8.21% [4295 / 52343, 396 ins, 315 del, 3584 sub ]
+2021-09-26 12:47:16,432 INFO [decode.py:506] Wrote detailed error stats to conformer_ctc/exp/errs-test-other-ctc-decoding.txt
+2021-09-26 12:47:16,432 INFO [decode.py:522]
+For test-other, WER of different settings are:
+ctc-decoding    8.21    best for test-other
+
+2021-09-26 12:47:16,433 INFO [decode.py:680] Done!
+
+
+
+
+

Pre-trained Model

+

We have uploaded a pre-trained model to https://huggingface.co/csukuangfj/icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09

+

We describe how to use the pre-trained model to transcribe a sound file or multiple sound files in the following.

+
+

Install kaldifeat

+

kaldifeat is used to extract features for a single sound file or multiple sound files at the same time.

+

Please refer to https://github.com/csukuangfj/kaldifeat for installation.

+
+
+

Download the pre-trained model

+

The following commands describe how to download the pre-trained model:

+
$ cd egs/librispeech/ASR
+$ git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09
+$ cd icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09
+$ git lfs pull
+
+
+
+

Caution

+

You have to use git lfs pull to download the pre-trained model. Otherwise, you will have the following issue when running decode.py:

+
+
_pickle.UnpicklingError: invalid load key, 'v'
+
+
+
+

To fix that issue, please use:

+
+
cd icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09
+git lfs pull
+
+
+
+
+
+

Caution

+

In order to use this pre-trained model, your k2 version has to be v1.9 or later.

+
+

After downloading, you will have the following files:

+
$ cd egs/librispeech/ASR
+$ tree icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09
+
+
+
icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09
+|-- README.md
+|-- data
+|   |-- lang_bpe_500
+|   |   |-- HLG.pt
+|   |   |-- HLG_modified.pt
+|   |   |-- bpe.model
+|   |   |-- tokens.txt
+|   |   `-- words.txt
+|   `-- lm
+|       `-- G_4_gram.pt
+|-- exp
+|   |-- cpu_jit.pt
+|   `-- pretrained.pt
+|-- log
+|   `-- log-decode-2021-11-09-17-38-28
+`-- test_wavs
+    |-- 1089-134686-0001.wav
+    |-- 1221-135766-0001.wav
+    |-- 1221-135766-0002.wav
+    `-- trans.txt
+
+
+
+
File descriptions:
    +
  • data/lang_bpe_500/HLG.pt

    It is the decoding graph.

  • data/lang_bpe_500/HLG_modified.pt

    It uses a modified CTC topology while building HLG.

  • data/lang_bpe_500/bpe.model

    It is a sentencepiece model. You can use it to reproduce our results.

  • data/lang_bpe_500/tokens.txt

    It contains tokens and their IDs, generated from bpe.model.
    Provided only for convenience so that you can look up the SOS/EOS ID easily.

  • data/lang_bpe_500/words.txt

    It contains words and their IDs.

  • data/lm/G_4_gram.pt

    It is a 4-gram LM, used for n-gram LM rescoring.

  • exp/pretrained.pt

    It contains pre-trained model parameters, obtained by averaging
    checkpoints from epoch-23.pt to epoch-77.pt.
    Note: We have removed optimizer state_dict to reduce file size.

  • exp/cpu_jit.pt

    It contains the torch scripted model that can be deployed in C++.

  • test_wavs/*.wav

    It contains some test sound files from the LibriSpeech test-clean dataset.

  • test_wavs/trans.txt

    It contains the reference transcripts for the sound files in test_wavs/.
+
+
+

The information of the test sound files is listed below:

+
$ soxi icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/*.wav
+
+Input File     : 'icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:06.62 = 106000 samples ~ 496.875 CDDA sectors
+File Size      : 212k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:16.71 = 267440 samples ~ 1253.62 CDDA sectors
+File Size      : 535k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+Input File     : 'icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'
+Channels       : 1
+Sample Rate    : 16000
+Precision      : 16-bit
+Duration       : 00:00:04.83 = 77200 samples ~ 361.875 CDDA sectors
+File Size      : 154k
+Bit Rate       : 256k
+Sample Encoding: 16-bit Signed Integer PCM
+
+Total Duration of 3 files: 00:00:28.16
+
+
+
+
+

Usage

+
$ cd egs/librispeech/ASR
+$ ./conformer_ctc/pretrained.py --help
+
+
+

displays the help information.

+

It supports 4 decoding methods:

+
+
    +
  • CTC decoding
  • HLG decoding
  • HLG + n-gram LM rescoring
  • HLG + n-gram LM rescoring + attention decoder rescoring
+
+
+

CTC decoding

+

CTC decoding uses the best path of the decoding lattice as the decoding result without any LM or lexicon.
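The ctc-decoding method in the script builds a CTC topology with k2, but the underlying idea is the classic rule: take the best token per frame, collapse repeats, and remove blanks. A toy greedy version in plain PyTorch:

import torch

def ctc_greedy_decode(log_probs: torch.Tensor, blank_id: int = 0):
    """log_probs: (T, vocab_size) per-frame log-probabilities."""
    ids = log_probs.argmax(dim=-1).tolist()
    out, prev = [], blank_id
    for i in ids:
        if i != blank_id and i != prev:  # collapse repeats, drop blanks
            out.append(i)
        prev = i
    return out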

+

The command to run CTC decoding is:

$ cd egs/librispeech/ASR
$ ./conformer_ctc/pretrained.py \
   --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \
   --bpe-model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/bpe.model \
   --method ctc-decoding \
   --num-classes 500 \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

The output is given below:

2021-11-10 12:12:29,554 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 0, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': None, 'HLG': None, 'bpe_model': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/bpe.model', 'method': 'ctc-decoding', 'G': None, 'num_paths': 100, 'ngram_lm_scale': 1.3, 'attention_decoder_scale': 1.2, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}}
2021-11-10 12:12:29,554 INFO [pretrained.py:266] device: cuda:0
2021-11-10 12:12:29,554 INFO [pretrained.py:268] Creating model
2021-11-10 12:12:35,600 INFO [pretrained.py:285] Constructing Fbank computer
2021-11-10 12:12:35,601 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav']
2021-11-10 12:12:35,758 INFO [pretrained.py:301] Decoding started
2021-11-10 12:12:36,025 INFO [pretrained.py:319] Use CTC decoding
2021-11-10 12:12:36,204 INFO [pretrained.py:425]
./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav:
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROFFELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav:
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav:
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION

2021-11-10 12:12:36,204 INFO [pretrained.py:427] Decoding Done

HLG decoding

HLG decoding uses the best path of the decoding lattice as the decoding result.
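Roughly speaking, 1best decoding intersects the network output with the HLG graph and takes the best-scoring path. The sketch below uses k2 calls as of k2 v1.9 (k2.DenseFsaVec, k2.intersect_dense_pruned, k2.shortest_path); the stand-in nnet_output and supervision_segments are assumptions for illustration, and this is not a drop-in replacement for pretrained.py.

import torch
import k2

HLG = k2.Fsa.from_dict(torch.load("data/lang_bpe_500/HLG.pt", map_location="cpu"))

# Stand-in network output: 1 utterance, 100 frames, 500 classes (illustrative)
nnet_output = torch.randn(1, 100, 500).log_softmax(dim=-1)
# (utt_index, start_frame, num_frames) per utterance
supervision_segments = torch.tensor([[0, 0, 100]], dtype=torch.int32)

dense_fsa_vec = k2.DenseFsaVec(nnet_output, supervision_segments)
lattice = k2.intersect_dense_pruned(
    HLG,
    dense_fsa_vec,
    search_beam=20,          # beam values taken from the logs in this tutorial
    output_beam=8,
    min_active_states=30,
    max_active_states=10000,
)
best_path = k2.shortest_path(lattice, use_double_scores=True)
# best_path.aux_labels holds word IDs, which words.txt maps back to words.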


The command to run HLG decoding is:

$ cd egs/librispeech/ASR
$ ./conformer_ctc/pretrained.py \
   --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \
   --words-file ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \
   --method 1best \
   --num-classes 500 \
   --HLG ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

The output is given below:

2021-11-10 13:33:03,723 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 0, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt', 'HLG': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt', 'bpe_model': None, 'method': '1best', 'G': None, 'num_paths': 100, 'ngram_lm_scale': 1.3, 'attention_decoder_scale': 1.2, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}}
2021-11-10 13:33:03,723 INFO [pretrained.py:266] device: cuda:0
2021-11-10 13:33:03,723 INFO [pretrained.py:268] Creating model
2021-11-10 13:33:09,775 INFO [pretrained.py:285] Constructing Fbank computer
2021-11-10 13:33:09,776 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav']
2021-11-10 13:33:09,881 INFO [pretrained.py:301] Decoding started
2021-11-10 13:33:09,951 INFO [pretrained.py:352] Loading HLG from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt
2021-11-10 13:33:13,234 INFO [pretrained.py:384] Use HLG decoding
2021-11-10 13:33:13,571 INFO [pretrained.py:425]
./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav:
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav:
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav:
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION

2021-11-10 13:33:13,571 INFO [pretrained.py:427] Decoding Done

HLG decoding + LM rescoring

It uses an n-gram LM to rescore the decoding lattice; the best path of the rescored lattice is the decoding result.
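Conceptually, rescoring re-ranks each candidate path by an interpolated total of its acoustic score and the 4-gram LM's score, with --ngram-lm-scale as the interpolation weight. A toy sketch (the tuple layout is illustrative; the real code rescores the lattice itself with k2):

def pick_best(paths, ngram_lm_scale=1.0):
    """paths: iterable of (hypothesis, am_score, lm_score) in log space."""
    return max(paths, key=lambda p: p[1] + ngram_lm_scale * p[2])[0]

# Example: a slightly worse acoustic score can win after LM rescoring.
paths = [("A B C", -10.0, -1.0), ("A B SEE", -9.5, -4.0)]
print(pick_best(paths, ngram_lm_scale=1.0))  # -> "A B C"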


The command to run HLG decoding + LM rescoring is:

$ cd egs/librispeech/ASR
$ ./conformer_ctc/pretrained.py \
   --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \
   --words-file ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \
   --method whole-lattice-rescoring \
   --num-classes 500 \
   --HLG ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \
   --G ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \
   --ngram-lm-scale 1.0 \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

Its output is:

2021-11-10 13:39:55,857 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 0, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt', 'HLG': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt', 'bpe_model': None, 'method': 'whole-lattice-rescoring', 'G': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt', 'num_paths': 100, 'ngram_lm_scale': 1.0, 'attention_decoder_scale': 1.2, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}}
2021-11-10 13:39:55,858 INFO [pretrained.py:266] device: cuda:0
2021-11-10 13:39:55,858 INFO [pretrained.py:268] Creating model
2021-11-10 13:40:01,979 INFO [pretrained.py:285] Constructing Fbank computer
2021-11-10 13:40:01,980 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav']
2021-11-10 13:40:02,055 INFO [pretrained.py:301] Decoding started
2021-11-10 13:40:02,117 INFO [pretrained.py:352] Loading HLG from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt
2021-11-10 13:40:05,051 INFO [pretrained.py:363] Loading G from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt
2021-11-10 13:40:18,959 INFO [pretrained.py:389] Use HLG decoding + LM rescoring
2021-11-10 13:40:19,546 INFO [pretrained.py:425]
./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav:
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav:
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav:
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION

2021-11-10 13:40:19,546 INFO [pretrained.py:427] Decoding Done

HLG decoding + LM rescoring + attention decoder rescoring

It uses an n-gram LM to rescore the decoding lattice, extracts n paths from the rescored lattice, and rescores the extracted paths with an attention decoder. The path with the highest score is the decoding result.
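In other words, each of the n extracted paths gets three scores that are combined with the weights given by --ngram-lm-scale and --attention-decoder-scale. A toy sketch with illustrative names (the real code works on lattice paths in k2):

def attention_rescore(paths, ngram_lm_scale=2.0, attention_scale=2.0):
    """paths: iterable of (hyp, am_score, ngram_score, attention_score)."""
    def total(p):
        _, am, ngram, attn = p
        return am + ngram_lm_scale * ngram + attention_scale * attn
    return max(paths, key=total)[0]

# e.g. attention_rescore([("A", -10, -2, -1), ("B", -9, -4, -3)]) -> "A"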


The command to run HLG decoding + LM rescoring + attention decoder rescoring is:

$ cd egs/librispeech/ASR
$ ./conformer_ctc/pretrained.py \
   --checkpoint ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt \
   --words-file ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \
   --method attention-decoder \
   --num-classes 500 \
   --HLG ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \
   --G ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \
   --ngram-lm-scale 2.0 \
   --attention-decoder-scale 2.0 \
   --nbest-scale 0.5 \
   --num-paths 100 \
   --sos-id 1 \
   --eos-id 1 \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
   ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

The output is below:

2021-11-10 13:43:45,598 INFO [pretrained.py:260] {'sample_rate': 16000, 'subsampling_factor': 4, 'vgg_frontend': False, 'use_feat_batchnorm': True, 'feature_dim': 80, 'nhead': 8, 'attention_dim': 512, 'num_decoder_layers': 6, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/pretrained.pt', 'words_file': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt', 'HLG': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt', 'bpe_model': None, 'method': 'attention-decoder', 'G': './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt', 'num_paths': 100, 'ngram_lm_scale': 2.0, 'attention_decoder_scale': 2.0, 'nbest_scale': 0.5, 'sos_id': 1, 'num_classes': 500, 'eos_id': 1, 'sound_files': ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav'], 'env_info': {'k2-version': '1.9', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4', 'k2-git-date': 'Tue Oct 26 22:12:54 2021', 'lhotse-version': '0.11.0.dev+missing.version.file', 'torch-cuda-available': True, 'torch-cuda-version': '10.1', 'python-version': '3.8', 'icefall-git-branch': 'bpe-500', 'icefall-git-sha1': '8d93169-dirty', 'icefall-git-date': 'Wed Nov 10 11:52:44 2021', 'icefall-path': '/ceph-fj/fangjun/open-source-2/icefall-fix', 'k2-path': '/ceph-fj/fangjun/open-source-2/k2-bpe-500/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-fj/fangjun/open-source-2/lhotse-bpe-500/lhotse/__init__.py'}}
2021-11-10 13:43:45,599 INFO [pretrained.py:266] device: cuda:0
2021-11-10 13:43:45,599 INFO [pretrained.py:268] Creating model
2021-11-10 13:43:51,833 INFO [pretrained.py:285] Constructing Fbank computer
2021-11-10 13:43:51,834 INFO [pretrained.py:295] Reading sound files: ['./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav', './icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav']
2021-11-10 13:43:51,915 INFO [pretrained.py:301] Decoding started
2021-11-10 13:43:52,076 INFO [pretrained.py:352] Loading HLG from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt
2021-11-10 13:43:55,110 INFO [pretrained.py:363] Loading G from ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt
2021-11-10 14:44:09,329 INFO [pretrained.py:397] Use HLG + LM rescoring + attention decoder rescoring
2021-11-10 13:44:10,192 INFO [pretrained.py:425]
./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav:
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav:
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav:
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION

2021-11-10 13:44:10,192 INFO [pretrained.py:427] Decoding Done

Compute WER with the pre-trained model


To check the WER of the pre-trained model on the test datasets, run the commands below. The symlink makes decode.py load pretrained.pt as if it were the checkpoint of epoch 999, so --epoch 999 --avg 1 decodes with exactly the pre-trained model:

$ cd egs/librispeech/ASR
$ cd icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/
$ ln -s pretrained.pt epoch-999.pt
$ cd ../..
$ ./conformer_ctc/decode.py \
    --exp-dir ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp \
    --lang-dir ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500 \
    --lm-dir ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm \
    --epoch 999 \
    --avg 1 \
    --concatenate-cuts 0 \
    --bucketing-sampler 1 \
    --max-duration 30 \
    --num-paths 1000 \
    --method attention-decoder \
    --nbest-scale 0.5
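decode.py reports the WER for you; for reference, WER is the word-level edit distance (substitutions + deletions + insertions) divided by the number of reference words. A self-contained sketch of the metric itself:

def wer(ref: str, hyp: str) -> float:
    r, h = ref.split(), hyp.split()
    d = [[0] * (len(h) + 1) for _ in range(len(r) + 1)]
    for i in range(len(r) + 1):
        d[i][0] = i                      # deletions
    for j in range(len(h) + 1):
        d[0][j] = j                      # insertions
    for i in range(1, len(r) + 1):
        for j in range(1, len(h) + 1):
            sub = d[i - 1][j - 1] + (r[i - 1] != h[j - 1])
            d[i][j] = min(sub, d[i - 1][j] + 1, d[i][j - 1] + 1)
    return d[len(r)][len(h)] / max(len(r), 1)

# wer("a b c", "a x c") == 1/3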

Colab notebook


We provide a Colab notebook for this recipe showing how to use a pre-trained model.


librispeech asr conformer ctc colab notebook


Hint


Due to limited memory provided by Colab, you have to upgrade to Colab Pro to run HLG decoding + LM rescoring and HLG decoding + LM rescoring + attention decoder rescoring. Otherwise, you can only run HLG decoding with Colab.


Congratulations! You have finished the LibriSpeech ASR recipe with conformer CTC models in icefall.


If you want to deploy your trained model in C++, please read the following section.


Deployment with C++


This section describes how to deploy the pre-trained model in C++, without Python dependencies.


Hint


At present, it does NOT support streaming decoding.


First, let us compile k2 from source:

$ cd $HOME
$ git clone https://github.com/k2-fsa/k2
$ cd k2
$ git checkout v2.0-pre

Caution


You have to switch to the branch v2.0-pre!

$ mkdir build-release
$ cd build-release
$ cmake -DCMAKE_BUILD_TYPE=Release ..
$ make -j ctc_decode hlg_decode ngram_lm_rescore attention_rescore

# You will find four binaries in `./bin`, i.e.,
# ./bin/ctc_decode, ./bin/hlg_decode,
# ./bin/ngram_lm_rescore, and ./bin/attention_rescore

Now you are ready to go!


Assume you have run:

$ cd k2/build-release
$ ln -s /path/to/icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09 ./

To view the usage of ./bin/ctc_decode, run:

$ ./bin/ctc_decode

It will show you the following message:

Please provide --nn_model

This file implements decoding with a CTC topology, without any
kinds of LM or lexicons.

Usage:
  ./bin/ctc_decode \
    --use_gpu true \
    --nn_model <path to torch scripted pt file> \
    --bpe_model <path to pre-trained BPE model> \
    <path to foo.wav> \
    <path to bar.wav> \
    <more waves if any>

To see all possible options, use
  ./bin/ctc_decode --help

Caution:
 - Only sound files (*.wav) with single channel are supported.
 - It assumes the model is conformer_ctc/transformer.py from icefall.
   If you use a different model, you have to change the code
   related to `model.forward` in this file.
./bin/ctc_decode \
  --use_gpu true \
  --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \
  --bpe_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/bpe.model \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

Its output is:

2021-11-10 13:57:55.316 [I] k2/torch/bin/ctc_decode.cu:105:int main(int, char**) Use GPU
2021-11-10 13:57:55.316 [I] k2/torch/bin/ctc_decode.cu:109:int main(int, char**) Device: cuda:0
2021-11-10 13:57:55.316 [I] k2/torch/bin/ctc_decode.cu:118:int main(int, char**) Load wave files
2021-11-10 13:58:01.221 [I] k2/torch/bin/ctc_decode.cu:125:int main(int, char**) Build Fbank computer
2021-11-10 13:58:01.222 [I] k2/torch/bin/ctc_decode.cu:136:int main(int, char**) Compute features
2021-11-10 13:58:01.228 [I] k2/torch/bin/ctc_decode.cu:144:int main(int, char**) Load neural network model
2021-11-10 13:58:02.19 [I] k2/torch/bin/ctc_decode.cu:159:int main(int, char**) Compute nnet_output
2021-11-10 13:58:02.543 [I] k2/torch/bin/ctc_decode.cu:174:int main(int, char**) Build CTC topo
2021-11-10 13:58:02.547 [I] k2/torch/bin/ctc_decode.cu:177:int main(int, char**) Decoding
2021-11-10 13:58:02.708 [I] k2/torch/bin/ctc_decode.cu:207:int main(int, char**)
Decoding result:

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROFFELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION
./bin/hlg_decode \
  --use_gpu true \
  --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \
  --hlg ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \
  --word_table ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

The output is:

2021-11-10 13:59:04.729 [I] k2/torch/bin/hlg_decode.cu:111:int main(int, char**) Use GPU
2021-11-10 13:59:04.729 [I] k2/torch/bin/hlg_decode.cu:115:int main(int, char**) Device: cuda:0
2021-11-10 13:59:04.729 [I] k2/torch/bin/hlg_decode.cu:124:int main(int, char**) Load wave files
2021-11-10 13:59:10.702 [I] k2/torch/bin/hlg_decode.cu:131:int main(int, char**) Build Fbank computer
2021-11-10 13:59:10.703 [I] k2/torch/bin/hlg_decode.cu:142:int main(int, char**) Compute features
2021-11-10 13:59:10.707 [I] k2/torch/bin/hlg_decode.cu:150:int main(int, char**) Load neural network model
2021-11-10 13:59:11.545 [I] k2/torch/bin/hlg_decode.cu:165:int main(int, char**) Compute nnet_output
2021-11-10 13:59:12.72 [I] k2/torch/bin/hlg_decode.cu:180:int main(int, char**) Load ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt
2021-11-10 13:59:12.994 [I] k2/torch/bin/hlg_decode.cu:185:int main(int, char**) Decoding
2021-11-10 13:59:13.268 [I] k2/torch/bin/hlg_decode.cu:216:int main(int, char**)
Decoding result:

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION
./bin/ngram_lm_rescore \
  --use_gpu true \
  --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \
  --hlg ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \
  --g ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \
  --ngram_lm_scale 1.0 \
  --word_table ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

The output is:

2021-11-10 14:00:55.279 [I] k2/torch/bin/ngram_lm_rescore.cu:122:int main(int, char**) Use GPU
2021-11-10 14:00:55.280 [I] k2/torch/bin/ngram_lm_rescore.cu:126:int main(int, char**) Device: cuda:0
2021-11-10 14:00:55.280 [I] k2/torch/bin/ngram_lm_rescore.cu:135:int main(int, char**) Load wave files
2021-11-10 14:01:01.214 [I] k2/torch/bin/ngram_lm_rescore.cu:142:int main(int, char**) Build Fbank computer
2021-11-10 14:01:01.215 [I] k2/torch/bin/ngram_lm_rescore.cu:153:int main(int, char**) Compute features
2021-11-10 14:01:01.219 [I] k2/torch/bin/ngram_lm_rescore.cu:161:int main(int, char**) Load neural network model
2021-11-10 14:01:01.945 [I] k2/torch/bin/ngram_lm_rescore.cu:176:int main(int, char**) Compute nnet_output
2021-11-10 14:01:02.475 [I] k2/torch/bin/ngram_lm_rescore.cu:191:int main(int, char**) Load ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt
2021-11-10 14:01:03.398 [I] k2/torch/bin/ngram_lm_rescore.cu:199:int main(int, char**) Decoding
2021-11-10 14:01:03.515 [I] k2/torch/bin/ngram_lm_rescore.cu:205:int main(int, char**) Load n-gram LM: ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt
2021-11-10 14:01:07.432 [W] k2/torch/csrc/deserialization.cu:441:k2::FsaClass k2::LoadFsa(const string&, c10::optional<c10::Device>)
Ignore non tensor attribute: 'dummy' of type: Int
2021-11-10 14:01:07.589 [I] k2/torch/bin/ngram_lm_rescore.cu:214:int main(int, char**) Rescore with an n-gram LM
2021-11-10 14:01:08.68 [I] k2/torch/bin/ngram_lm_rescore.cu:242:int main(int, char**)
Decoding result:

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION
./bin/attention_rescore \
  --use_gpu true \
  --nn_model ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/exp/cpu_jit.pt \
  --hlg ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt \
  --g ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt \
  --ngram_lm_scale 2.0 \
  --attention_scale 2.0 \
  --num_paths 100 \
  --nbest_scale 0.5 \
  --word_table ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/words.txt \
  --sos_id 1 \
  --eos_id 1 \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav \
  ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav

The output is:

2021-11-10 14:02:43.656 [I] k2/torch/bin/attention_rescore.cu:149:int main(int, char**) Use GPU
2021-11-10 14:02:43.656 [I] k2/torch/bin/attention_rescore.cu:153:int main(int, char**) Device: cuda:0
2021-11-10 14:02:43.656 [I] k2/torch/bin/attention_rescore.cu:162:int main(int, char**) Load wave files
2021-11-10 14:02:49.216 [I] k2/torch/bin/attention_rescore.cu:169:int main(int, char**) Build Fbank computer
2021-11-10 14:02:49.217 [I] k2/torch/bin/attention_rescore.cu:180:int main(int, char**) Compute features
2021-11-10 14:02:49.222 [I] k2/torch/bin/attention_rescore.cu:188:int main(int, char**) Load neural network model
2021-11-10 14:02:49.984 [I] k2/torch/bin/attention_rescore.cu:203:int main(int, char**) Compute nnet_output
2021-11-10 14:02:50.624 [I] k2/torch/bin/attention_rescore.cu:220:int main(int, char**) Load ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lang_bpe_500/HLG.pt
2021-11-10 14:02:51.519 [I] k2/torch/bin/attention_rescore.cu:228:int main(int, char**) Decoding
2021-11-10 14:02:51.632 [I] k2/torch/bin/attention_rescore.cu:234:int main(int, char**) Load n-gram LM: ./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/data/lm/G_4_gram.pt
2021-11-10 14:02:55.537 [W] k2/torch/csrc/deserialization.cu:441:k2::FsaClass k2::LoadFsa(const string&, c10::optional<c10::Device>) Ignore non tensor attribute: 'dummy' of type: Int
2021-11-10 14:02:55.645 [I] k2/torch/bin/attention_rescore.cu:243:int main(int, char**) Rescore with an n-gram LM
2021-11-10 14:02:55.970 [I] k2/torch/bin/attention_rescore.cu:246:int main(int, char**) Sample 100 paths
2021-11-10 14:02:56.215 [I] k2/torch/bin/attention_rescore.cu:293:int main(int, char**) Run attention decoder
2021-11-10 14:02:57.35 [I] k2/torch/bin/attention_rescore.cu:303:int main(int, char**) Rescoring
2021-11-10 14:02:57.179 [I] k2/torch/bin/attention_rescore.cu:369:int main(int, char**)
Decoding result:

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1089-134686-0001.wav
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0001.wav
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./icefall-asr-librispeech-conformer-ctc-jit-bpe-500-2021-11-09/test_wavs/1221-135766-0002.wav
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION

There is a Colab notebook showing you how to run a torch scripted model in C++. Please see librispeech asr conformer ctc torch script colab notebook.

diff --git a/recipes/librispeech/index.html b/recipes/librispeech/index.html
new file mode 100644
index 000000000..5ef6d9104
--- /dev/null
+++ b/recipes/librispeech/index.html
LibriSpeech

diff --git a/recipes/librispeech/tdnn_lstm_ctc.html b/recipes/librispeech/tdnn_lstm_ctc.html
new file mode 100644
index 000000000..a20f7c0e7
--- /dev/null
+++ b/recipes/librispeech/tdnn_lstm_ctc.html

TDNN-LSTM-CTC


This tutorial shows you how to run a TDNN-LSTM-CTC model with the LibriSpeech dataset.


Hint


We assume you have read the page Installation and have set up the environment for icefall.


Data preparation

$ cd egs/librispeech/ASR
$ ./prepare.sh

The script ./prepare.sh handles the data preparation for you, automagically. All you need to do is to run it.


The data preparation contains several stages; you can use the following two options:

  • --stage
  • --stop-stage

to control which stage(s) should be run. By default, all stages are executed.


For example,

$ cd egs/librispeech/ASR
$ ./prepare.sh --stage 0 --stop-stage 0

means to run only stage 0.


To run stage 2 to stage 5, use:

$ ./prepare.sh --stage 2 --stop-stage 5

We provide the following YouTube video showing how to run ./prepare.sh.


Note


To get the latest news of next-gen Kaldi, please subscribe to the following YouTube channel by Nadira Povey:


Training


Now we describe the training of the TDNN-LSTM-CTC model, contained in the tdnn_lstm_ctc folder.


The command to run the training part is:

$ cd egs/librispeech/ASR
$ export CUDA_VISIBLE_DEVICES="0,1,2,3"
$ ./tdnn_lstm_ctc/train.py --world-size 4

By default, it will run 20 epochs. Training logs and checkpoints are saved in tdnn_lstm_ctc/exp.


In tdnn_lstm_ctc/exp, you will find the following files:

  • epoch-0.pt, epoch-1.pt, …, epoch-19.pt

    These are checkpoint files, containing model state_dict and optimizer state_dict.
    To resume training from some checkpoint, say epoch-10.pt, you can use:

    $ ./tdnn_lstm_ctc/train.py --start-epoch 11

  • tensorboard/

    This folder contains TensorBoard logs. Training loss, validation loss, learning
    rate, etc., are recorded in these logs. You can visualize them by:

    $ cd tdnn_lstm_ctc/exp/tensorboard
    $ tensorboard dev upload --logdir . --description "TDNN LSTM training for librispeech with icefall"

  • log/log-train-xxxx

    It is the detailed training log in text format, the same as the one
    you saw printed to the console during training.

To see available training options, you can use:

$ ./tdnn_lstm_ctc/train.py --help

Other training options, e.g., learning rate, results dir, etc., are pre-configured in the function get_params() in tdnn_lstm_ctc/train.py. Normally, you don't need to change them, but you can change them by modifying the code if you want.


Decoding


The decoding part uses checkpoints saved by the training part, so you have to run the training part first.


The command for decoding is:

$ export CUDA_VISIBLE_DEVICES="0"
$ ./tdnn_lstm_ctc/decode.py

You will see the WER in the output log.


Decoded results are saved in tdnn_lstm_ctc/exp.

$ ./tdnn_lstm_ctc/decode.py --help

shows you the available decoding options.


Some commonly used options are:

  • --epoch

    You can select which checkpoint to be used for decoding.
    For instance, ./tdnn_lstm_ctc/decode.py --epoch 10 means to use
    ./tdnn_lstm_ctc/exp/epoch-10.pt for decoding.

  • --avg

    It's related to model averaging. It specifies the number of checkpoints
    to be averaged; the averaged model is used for decoding (see the sketch
    after this list). For example, the following command:

    $ ./tdnn_lstm_ctc/decode.py --epoch 10 --avg 3

    uses the average of epoch-8.pt, epoch-9.pt and epoch-10.pt
    for decoding.

  • --export

    If it is True, i.e., ./tdnn_lstm_ctc/decode.py --export 1, the code
    will save the averaged model to tdnn_lstm_ctc/exp/pretrained.pt.
    See Pre-trained Model for how to use it.
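Here is a minimal sketch of what checkpoint averaging amounts to: load each checkpoint's model state_dict and average every parameter tensor elementwise. icefall ships its own helper for this; the "model" key below matches how icefall checkpoints are saved, to the best of our knowledge.

import torch

def average_checkpoints(filenames):
    avg = torch.load(filenames[0], map_location="cpu")["model"]
    for f in filenames[1:]:
        state = torch.load(f, map_location="cpu")["model"]
        for k in avg:
            avg[k] = avg[k] + state[k]
    n = len(filenames)
    for k in avg:
        # integer tensors (e.g. counters) use floor division
        avg[k] = avg[k] / n if avg[k].is_floating_point() else avg[k] // n
    return avg

# e.g. average_checkpoints(["exp/epoch-8.pt", "exp/epoch-9.pt", "exp/epoch-10.pt"])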

Pre-trained Model


We have uploaded the pre-trained model to https://huggingface.co/pkufool/icefall_asr_librispeech_tdnn-lstm_ctc.


The following shows you how to use the pre-trained model.


Install kaldifeat


kaldifeat is used to extract features for a single sound file or multiple sound files at the same time.


Please refer to https://github.com/csukuangfj/kaldifeat for installation.
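For orientation, below is a minimal sketch of extracting fbank features with kaldifeat, in the style of the pretrained.py scripts. The option names follow the kaldifeat Python API as we understand it, and num_bins = 80 is an assumption here; match it to the recipe's feature dimension, and double-check against the kaldifeat repository if anything fails.

import torch
import torchaudio
import kaldifeat

opts = kaldifeat.FbankOptions()
opts.device = torch.device("cpu")
opts.frame_opts.dither = 0
opts.frame_opts.samp_freq = 16000
opts.mel_opts.num_bins = 80   # assumed feature dim; match the recipe

fbank = kaldifeat.Fbank(opts)

wave, sample_rate = torchaudio.load("test_wavs/1089-134686-0001.flac")
assert sample_rate == 16000
features = fbank(wave[0])     # (num_frames, num_bins)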


Download the pre-trained model

$ cd egs/librispeech/ASR
$ mkdir tmp
$ cd tmp
$ git lfs install
$ git clone https://huggingface.co/pkufool/icefall_asr_librispeech_tdnn-lstm_ctc

Caution


You have to use git lfs to download the pre-trained model.


Caution


In order to use this pre-trained model, your k2 version has to be v1.7 or later.


After downloading, you will have the following files:

$ cd egs/librispeech/ASR
$ tree tmp
tmp/
`-- icefall_asr_librispeech_tdnn-lstm_ctc
    |-- README.md
    |-- data
    |   |-- lang_phone
    |   |   |-- HLG.pt
    |   |   |-- tokens.txt
    |   |   `-- words.txt
    |   `-- lm
    |       `-- G_4_gram.pt
    |-- exp
    |   `-- pretrained.pt
    `-- test_wavs
        |-- 1089-134686-0001.flac
        |-- 1221-135766-0001.flac
        |-- 1221-135766-0002.flac
        `-- trans.txt

6 directories, 10 files

File descriptions:

  • data/lang_phone/HLG.pt

    It is the decoding graph.

  • data/lang_phone/tokens.txt

    It contains tokens and their IDs.

  • data/lang_phone/words.txt

    It contains words and their IDs.

  • data/lm/G_4_gram.pt

    It is a 4-gram LM, useful for LM rescoring.

  • exp/pretrained.pt

    It contains pre-trained model parameters, obtained by averaging
    checkpoints from epoch-14.pt to epoch-19.pt.
    Note: We have removed the optimizer state_dict to reduce file size.

  • test_wavs/*.flac

    It contains some test sound files from the LibriSpeech test-clean dataset.

  • test_wavs/trans.txt

    It contains the reference transcripts for the sound files in test_wavs/.

The information of the test sound files is listed below:

$ soxi tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/*.flac

Input File     : 'tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac'
Channels       : 1
Sample Rate    : 16000
Precision      : 16-bit
Duration       : 00:00:06.62 = 106000 samples ~ 496.875 CDDA sectors
File Size      : 116k
Bit Rate       : 140k
Sample Encoding: 16-bit FLAC


Input File     : 'tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac'
Channels       : 1
Sample Rate    : 16000
Precision      : 16-bit
Duration       : 00:00:16.71 = 267440 samples ~ 1253.62 CDDA sectors
File Size      : 343k
Bit Rate       : 164k
Sample Encoding: 16-bit FLAC


Input File     : 'tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac'
Channels       : 1
Sample Rate    : 16000
Precision      : 16-bit
Duration       : 00:00:04.83 = 77200 samples ~ 361.875 CDDA sectors
File Size      : 105k
Bit Rate       : 174k
Sample Encoding: 16-bit FLAC

Total Duration of 3 files: 00:00:28.16

Inference with a pre-trained model

$ cd egs/librispeech/ASR
$ ./tdnn_lstm_ctc/pretrained.py --help

shows the usage information of ./tdnn_lstm_ctc/pretrained.py.


To decode with the 1best method, we can use:

./tdnn_lstm_ctc/pretrained.py \
  --checkpoint ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/exp/pretrained.pt \
  --words-file ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/words.txt \
  --HLG ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt \
  ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac \
  ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac \
  ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac

The output is:

2021-08-24 16:57:13,315 INFO [pretrained.py:168] device: cuda:0
2021-08-24 16:57:13,315 INFO [pretrained.py:170] Creating model
2021-08-24 16:57:18,331 INFO [pretrained.py:182] Loading HLG from ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt
2021-08-24 16:57:27,581 INFO [pretrained.py:199] Constructing Fbank computer
2021-08-24 16:57:27,584 INFO [pretrained.py:209] Reading sound files: ['./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac']
2021-08-24 16:57:27,599 INFO [pretrained.py:215] Decoding started
2021-08-24 16:57:27,791 INFO [pretrained.py:245] Use HLG decoding
2021-08-24 16:57:28,098 INFO [pretrained.py:266]
./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac:
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac:
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac:
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION


2021-08-24 16:57:28,099 INFO [pretrained.py:268] Decoding Done

To decode with the whole-lattice-rescoring method, you can use:

./tdnn_lstm_ctc/pretrained.py \
  --checkpoint ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/exp/pretrained.pt \
  --words-file ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/words.txt \
  --HLG ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt \
  --method whole-lattice-rescoring \
  --G ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lm/G_4_gram.pt \
  --ngram-lm-scale 0.8 \
  ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac \
  ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac \
  ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac

The decoding output is:

2021-08-24 16:39:24,725 INFO [pretrained.py:168] device: cuda:0
2021-08-24 16:39:24,725 INFO [pretrained.py:170] Creating model
2021-08-24 16:39:29,403 INFO [pretrained.py:182] Loading HLG from ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lang_phone/HLG.pt
2021-08-24 16:39:40,631 INFO [pretrained.py:190] Loading G from ./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/data/lm/G_4_gram.pt
2021-08-24 16:39:53,098 INFO [pretrained.py:199] Constructing Fbank computer
2021-08-24 16:39:53,107 INFO [pretrained.py:209] Reading sound files: ['./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac', './tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac']
2021-08-24 16:39:53,121 INFO [pretrained.py:215] Decoding started
2021-08-24 16:39:53,443 INFO [pretrained.py:250] Use HLG decoding + LM rescoring
2021-08-24 16:39:54,010 INFO [pretrained.py:266]
./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1089-134686-0001.flac:
AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS

./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0001.flac:
GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONORED BOSOM TO CONNECT HER PARENT FOREVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN

./tmp/icefall_asr_librispeech_tdnn-lstm_ctc/test_wavs/1221-135766-0002.flac:
YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION


2021-08-24 16:39:54,010 INFO [pretrained.py:268] Decoding Done

Colab notebook


We provide a Colab notebook for decoding with the pre-trained model.


librispeech tdnn_lstm_ctc colab notebook


Congratulations! You have finished the TDNN-LSTM-CTC recipe on LibriSpeech in icefall.

diff --git a/recipes/timit/index.html b/recipes/timit/index.html
new file mode 100644
index 000000000..fc90b5a52
--- /dev/null
+++ b/recipes/timit/index.html

TIMIT

diff --git a/recipes/timit/tdnn_ligru_ctc.html b/recipes/timit/tdnn_ligru_ctc.html
new file mode 100644
index 000000000..edf14200b
--- /dev/null
+++ b/recipes/timit/tdnn_ligru_ctc.html

TDNN-LiGRU-CTC


This tutorial shows you how to run a TDNN-LiGRU-CTC model with the TIMIT dataset.


Hint


We assume you have read the page Installation and have set up the environment for icefall.


Data preparation

$ cd egs/timit/ASR
$ ./prepare.sh

The script ./prepare.sh handles the data preparation for you, automagically. All you need to do is to run it.


The data preparation contains several stages; you can use the following two options:

  • --stage
  • --stop-stage

to control which stage(s) should be run. By default, all stages are executed.


For example,

$ cd egs/timit/ASR
$ ./prepare.sh --stage 0 --stop-stage 0

means to run only stage 0.


To run stage 2 to stage 5, use:

$ ./prepare.sh --stage 2 --stop-stage 5

Training


Now we describe the training of the TDNN-LiGRU-CTC model, contained in the tdnn_ligru_ctc folder.


Hint


TIMIT is a very small dataset. So one GPU is enough.


The command to run the training part is:

$ cd egs/timit/ASR
$ export CUDA_VISIBLE_DEVICES="0"
$ ./tdnn_ligru_ctc/train.py

By default, it will run 25 epochs. Training logs and checkpoints are saved in tdnn_ligru_ctc/exp.


In tdnn_ligru_ctc/exp, you will find the following files:

  • epoch-0.pt, epoch-1.pt, …, epoch-29.pt

    These are checkpoint files, containing model state_dict and optimizer state_dict.
    To resume training from some checkpoint, say epoch-10.pt, you can use:

    $ ./tdnn_ligru_ctc/train.py --start-epoch 11

  • tensorboard/

    This folder contains TensorBoard logs. Training loss, validation loss, learning
    rate, etc., are recorded in these logs. You can visualize them by:

    $ cd tdnn_ligru_ctc/exp/tensorboard
    $ tensorboard dev upload --logdir . --description "TDNN ligru training for timit with icefall"

  • log/log-train-xxxx

    It is the detailed training log in text format, the same as the one
    you saw printed to the console during training.

To see available training options, you can use:

$ ./tdnn_ligru_ctc/train.py --help

Other training options, e.g., learning rate, results dir, etc., are pre-configured in the function get_params() in tdnn_ligru_ctc/train.py. Normally, you don't need to change them, but you can change them by modifying the code if you want.


Decoding


The decoding part uses checkpoints saved by the training part, so you have to run the training part first.


The command for decoding is:

$ export CUDA_VISIBLE_DEVICES="0"
$ ./tdnn_ligru_ctc/decode.py

You will see the WER in the output log.


Decoded results are saved in tdnn_ligru_ctc/exp.

$ ./tdnn_ligru_ctc/decode.py --help

shows you the available decoding options.


Some commonly used options are:

  • --epoch

    You can select which checkpoint to be used for decoding.
    For instance, ./tdnn_ligru_ctc/decode.py --epoch 10 means to use
    ./tdnn_ligru_ctc/exp/epoch-10.pt for decoding.

  • --avg

    It's related to model averaging. It specifies the number of checkpoints
    to be averaged; the averaged model is used for decoding.
    For example, the following command:

    $ ./tdnn_ligru_ctc/decode.py --epoch 25 --avg 17

    uses the average of epoch-9.pt, epoch-10.pt, epoch-11.pt,
    epoch-12.pt, epoch-13.pt, epoch-14.pt, epoch-15.pt,
    epoch-16.pt, epoch-17.pt, epoch-18.pt, epoch-19.pt,
    epoch-20.pt, epoch-21.pt, epoch-22.pt, epoch-23.pt,
    epoch-24.pt and epoch-25.pt for decoding.

  • --export

    If it is True, i.e., ./tdnn_ligru_ctc/decode.py --export 1, the code
    will save the averaged model to tdnn_ligru_ctc/exp/pretrained.pt.
    See Pre-trained Model for how to use it.

Pre-trained Model


We have uploaded the pre-trained model to https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_ligru_ctc.


The following shows you how to use the pre-trained model.


Install kaldifeat


kaldifeat is used to extract features for a single sound file or multiple sound files at the same time.


Please refer to https://github.com/csukuangfj/kaldifeat for installation.


Download the pre-trained model

$ cd egs/timit/ASR
$ mkdir tmp-ligru
$ cd tmp-ligru
$ git lfs install
$ git clone https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_ligru_ctc

Caution


You have to use git lfs to download the pre-trained model.


Caution


In order to use this pre-trained model, your k2 version has to be v1.7 or later.


After downloading, you will have the following files:

$ cd egs/timit/ASR
$ tree tmp-ligru
tmp-ligru/
`-- icefall_asr_timit_tdnn_ligru_ctc
    |-- README.md
    |-- data
    |   |-- lang_phone
    |   |   |-- HLG.pt
    |   |   |-- tokens.txt
    |   |   `-- words.txt
    |   `-- lm
    |       `-- G_4_gram.pt
    |-- exp
    |   `-- pretrained_average_9_25.pt
    `-- test_waves
        |-- FDHC0_SI1559.WAV
        |-- FELC0_SI756.WAV
        |-- FMGD0_SI1564.WAV
        `-- trans.txt

6 directories, 10 files

File descriptions:

  • data/lang_phone/HLG.pt

    It is the decoding graph.

  • data/lang_phone/tokens.txt

    It contains tokens and their IDs.

  • data/lang_phone/words.txt

    It contains words and their IDs.

  • data/lm/G_4_gram.pt

    It is a 4-gram LM, useful for LM rescoring.

  • exp/pretrained_average_9_25.pt

    It contains pre-trained model parameters, obtained by averaging
    checkpoints from epoch-9.pt to epoch-25.pt.
    Note: We have removed the optimizer state_dict to reduce file size.

  • test_waves/*.WAV

    It contains some test sound files from the TIMIT TEST dataset.

  • test_waves/trans.txt

    It contains the reference transcripts for the sound files in test_waves/.

The information of the test sound files is listed below:

$ ffprobe -show_format tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV

Input #0, nistsphere, from 'tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV':
Metadata:
  database_id     : TIMIT
  database_version: 1.0
  utterance_id    : dhc0_si1559
  sample_min      : -4176
  sample_max      : 5984
Duration: 00:00:03.40, bitrate: 258 kb/s
  Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s

$ ffprobe -show_format tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV

Input #0, nistsphere, from 'tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV':
Metadata:
  database_id     : TIMIT
  database_version: 1.0
  utterance_id    : elc0_si756
  sample_min      : -1546
  sample_max      : 1989
Duration: 00:00:04.19, bitrate: 257 kb/s
  Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s

$ ffprobe -show_format tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV

Input #0, nistsphere, from 'tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV':
Metadata:
  database_id     : TIMIT
  database_version: 1.0
  utterance_id    : mgd0_si1564
  sample_min      : -7626
  sample_max      : 10573
Duration: 00:00:04.44, bitrate: 257 kb/s
  Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s

Inference with a pre-trained model

$ cd egs/timit/ASR
$ ./tdnn_ligru_ctc/pretrained.py --help

shows the usage information of ./tdnn_ligru_ctc/pretrained.py.


To decode with the 1best method, we can use:

./tdnn_ligru_ctc/pretrained.py \
  --method 1best \
  --checkpoint ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/exp/pretrained_average_9_25.pt \
  --words-file ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/words.txt \
  --HLG ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt \
  ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV \
  ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV \
  ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV

The output is:

2021-11-08 20:41:33,660 INFO [pretrained.py:169] device: cuda:0
2021-11-08 20:41:33,660 INFO [pretrained.py:171] Creating model
2021-11-08 20:41:38,680 INFO [pretrained.py:183] Loading HLG from ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt
2021-11-08 20:41:38,695 INFO [pretrained.py:200] Constructing Fbank computer
2021-11-08 20:41:38,697 INFO [pretrained.py:210] Reading sound files: ['./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV']
2021-11-08 20:41:38,704 INFO [pretrained.py:216] Decoding started
2021-11-08 20:41:39,819 INFO [pretrained.py:246] Use HLG decoding
2021-11-08 20:41:39,829 INFO [pretrained.py:267]
./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV:
sil dh ih sh uw ah l iy v iy z ih sil p r aa sil k s ih m ey dx ih sil d w uh dx ih w ih s f iy l ih ng w ih th ih n ih m s eh l f sil jh

./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV:
sil m ih sil t ih r iy s sil s er r ih m ih sil m aa l ih sil k l ey sil r eh sil d w ay sil d aa r sil b ah f sil jh

./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV:
sil hh ah z sil b ih sil g r iy w ah z sil d aw n ih sil b ay s sil n ey sil w eh l f eh n s ih z eh n dh eh r w er sil g r ey z ih ng sil k ae dx l sil


2021-11-08 20:41:39,829 INFO [pretrained.py:269] Decoding Done

To decode with the whole-lattice-rescoring method, you can use:

./tdnn_ligru_ctc/pretrained.py \
  --method whole-lattice-rescoring \
  --checkpoint ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/exp/pretrained_average_9_25.pt \
  --words-file ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/words.txt \
  --HLG ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt \
  --G ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lm/G_4_gram.pt \
  --ngram-lm-scale 0.1 \
  ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV \
  ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV \
  ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV

The decoding output is:

+
2021-11-08 20:37:50,693 INFO [pretrained.py:169] device: cuda:0
+2021-11-08 20:37:50,693 INFO [pretrained.py:171] Creating model
+2021-11-08 20:37:54,693 INFO [pretrained.py:183] Loading HLG from ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lang_phone/HLG.pt
+2021-11-08 20:37:54,705 INFO [pretrained.py:191] Loading G from ./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/data/lm/G_4_gram.pt
+2021-11-08 20:37:54,714 INFO [pretrained.py:200] Constructing Fbank computer
+2021-11-08 20:37:54,715 INFO [pretrained.py:210] Reading sound files: ['./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV', './tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV']
+2021-11-08 20:37:54,720 INFO [pretrained.py:216] Decoding started
+2021-11-08 20:37:55,808 INFO [pretrained.py:251] Use HLG decoding + LM rescoring
+2021-11-08 20:37:56,348 INFO [pretrained.py:267]
+./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FDHC0_SI1559.WAV:
+sil dh ih sh uw ah l iy v iy z ah sil p r aa sil k s ih m ey dx ih sil d w uh dx iy w ih s f iy l iy ng w ih th ih n ih m s eh l f sil jh
+
+./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FELC0_SI756.WAV:
+sil m ih sil t ih r iy l s sil s er r eh m ih sil m aa l ih ng sil k l ey sil r eh sil d w ay sil d aa r sil b ah f sil jh ch
+
+./tmp-ligru/icefall_asr_timit_tdnn_ligru_ctc/test_waves/FMGD0_SI1564.WAV:
+sil hh ah z sil b ih n sil g r iy w ah z sil b aw n ih sil b ay s sil n ey sil w er l f eh n s ih z eh n dh eh r w er sil g r ey z ih ng sil k ae dx l sil
+
+
+2021-11-08 20:37:56,348 INFO [pretrained.py:269] Decoding Done
+
+
+
+
+
+
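
The --ngram-lm-scale option controls how strongly the 4-gram LM in G_4_gram.pt influences the rescored lattice. The snippet below is only an illustration of that score combination; the function name is hypothetical and the real logic lives in icefall's decoding code:

def combined_score(am_score, lm_score, ngram_lm_scale):
+    # Hypothetical helper: a lattice path's score is combined with the
+    # 4-gram LM score, weighted by --ngram-lm-scale.
+    return am_score + ngram_lm_scale * lm_score
+
+# Small scales (0.1 here, 0.08 for TDNN-LSTM-CTC) keep the acoustic
+# model dominant and let the 4-gram LM break ties between paths.
+print(combined_score(am_score=-12.3, lm_score=-45.6, ngram_lm_scale=0.1))
+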

Colab notebook

+

We provide a colab notebook for decoding with a pre-trained model.

+

timit tdnn_ligru_ctc colab notebook

+

Congratulations! You have finished the TDNN-LiGRU-CTC recipe on TIMIT in icefall.

+
+
\ No newline at end of file
diff --git a/recipes/timit/tdnn_lstm_ctc.html b/recipes/timit/tdnn_lstm_ctc.html
new file mode 100644
index 000000000..e90ec3e4f
--- /dev/null
+++ b/recipes/timit/tdnn_lstm_ctc.html
@@ -0,0 +1,479 @@

TDNN-LSTM-CTC

+

This tutorial shows you how to run a TDNN-LSTM-CTC model with the TIMIT dataset.

+
+

Hint

+

We assume you have read the page Installation and have set up the environment for icefall.

+
+
+

Data preparation

+
$ cd egs/timit/ASR
+$ ./prepare.sh
+
+
+

The script ./prepare.sh handles the data preparation for you, automagically. All you need to do is to run it.

+

The data preparation contains several stages. You can use the following two options:

+
+
    +
  • --stage

  • +
  • --stop-stage

  • +
+
+

to control which stage(s) should be run. By default, all stages are executed.

+

For example,

+
$ cd egs/timit/ASR
+$ ./prepare.sh --stage 0 --stop-stage 0
+
+
+

means to run only stage 0.

+

To run stage 2 to stage 5, use:

+
$ ./prepare.sh --stage 2 --stop-stage 5
+
+
+
+
+
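
If you prefer to see the gating spelled out, here is a small Python sketch of what --stage and --stop-stage amount to; the stage numbers and descriptions below are made up for illustration and do not match prepare.sh's actual stages:

stage, stop_stage = 2, 5
+
+def run_stage(n, description):
+    # Mirrors the bash pattern: a stage runs only when
+    # stage <= n <= stop_stage.
+    if stage <= n <= stop_stage:
+        print(f"Stage {n}: {description}")
+
+run_stage(0, "download data")           # skipped
+run_stage(2, "compute fbank features")  # runs
+run_stage(5, "compile HLG")             # runs
+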

Training

+

We now describe the training of the TDNN-LSTM-CTC model, contained in the tdnn_lstm_ctc folder.

+
+

Hint

+

TIMIT is a very small dataset. So one GPU for training is enough.

+
+

The command to run the training part is:

+
$ cd egs/timit/ASR
+$ export CUDA_VISIBLE_DEVICES="0"
+$ ./tdnn_lstm_ctc/train.py
+
+
+

By default, it will run 25 epochs. Training logs and checkpoints are saved in tdnn_lstm_ctc/exp.

+

In tdnn_lstm_ctc/exp, you will find the following files:

+
+
    +
  • epoch-0.pt, epoch-1.pt, …

    +

    These are checkpoint files, containing the model state_dict and the optimizer state_dict; a minimal loading sketch follows this list. To resume training from some checkpoint, say epoch-10.pt, you can use:

    +
    +
    $ ./tdnn_lstm_ctc/train.py --start-epoch 11
    +
    +
    +
    +
  • +
  • tensorboard/

    +

    This folder contains TensorBoard logs. Training loss, validation loss, learning rate, etc., are recorded in these logs. You can visualize them by:

    +
    +
    $ cd tdnn_lstm_ctc/exp/tensorboard
    +$ tensorboard dev upload --logdir . --description "TDNN LSTM training for timit with icefall"
    +
    +
    +
    +
  • +
  • log/log-train-xxxx

    +

    It is the detailed training log in text format, same as the one you saw printed to the console during training.

    +
  • +
+
+
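
The following self-contained sketch shows the round trip that makes --start-epoch possible. The toy model and the checkpoint keys ("model", "optimizer") are assumptions modeled on icefall's checkpoint utilities, not a verbatim excerpt from train.py:

import torch
+import torch.nn as nn
+
+# Toy stand-ins for the real model and optimizer built in train.py.
+model = nn.Linear(23, 4)
+optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
+
+# Each epoch-*.pt stores both state dicts, as sketched here.
+torch.save({"model": model.state_dict(),
+            "optimizer": optimizer.state_dict()}, "epoch-10.pt")
+
+# Resuming is just loading both state dicts back before continuing:
+ckpt = torch.load("epoch-10.pt", map_location="cpu")
+model.load_state_dict(ckpt["model"])
+optimizer.load_state_dict(ckpt["optimizer"])
+start_epoch = 11  # what --start-epoch 11 asks train.py to do
+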

To see available training options, you can use:

+
$ ./tdnn_lstm_ctc/train.py --help
+
+
+

Other training options, e.g., learning rate, results dir, etc., are pre-configured in the function get_params() in tdnn_lstm_ctc/train.py. Normally, you don’t need to change them. You can change them by modifying the code, if you want.

+
+
+

Decoding

+

The decoding part uses checkpoints saved by the training part, so you have to run the training part first.

+

The command for decoding is:

+
$ export CUDA_VISIBLE_DEVICES="0"
+$ ./tdnn_lstm_ctc/decode.py
+
+
+

You will see the WER in the output log.

+

Decoded results are saved in tdnn_lstm_ctc/exp.

+
$ ./tdnn_lstm_ctc/decode.py --help
+
+
+

shows you the available decoding options.

+

Some commonly used options are:

+
+
    +
  • --epoch

    +

    You can select which checkpoint to use for decoding. For instance, ./tdnn_lstm_ctc/decode.py --epoch 10 means to use ./tdnn_lstm_ctc/exp/epoch-10.pt for decoding.

    +
  • +
  • --avg

    +

    It’s related to model averaging. It specifies the number of checkpoints to average; the averaged model is used for decoding (see the sketch after this list). For example, the following command:

    +
    +
    $ ./tdnn_lstm_ctc/decode.py --epoch 25 --avg 10
    +
    +
    +
    +

    uses the average of epoch-16.pt, epoch-17.pt, epoch-18.pt, epoch-19.pt, epoch-20.pt, epoch-21.pt, epoch-22.pt, epoch-23.pt, epoch-24.pt and epoch-25.pt for decoding.

    +
  • +
  • --export

    +

    If it is True, i.e., ./tdnn_lstm_ctc/decode.py --export 1, the code will save the averaged model to tdnn_lstm_ctc/exp/pretrained.pt. See Pre-trained Model for how to use it.

    +
  • +
+
+
+
+
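
The sketch referenced in the --avg item above illustrates the idea behind checkpoint averaging: a parameter-wise arithmetic mean over the last N state dicts. It uses a toy model; icefall's actual implementation lives in its checkpoint utilities:

import torch
+import torch.nn as nn
+
+# Ten toy checkpoints standing in for epoch-16.pt ... epoch-25.pt.
+state_dicts = [nn.Linear(23, 4).state_dict() for _ in range(10)]
+
+# Parameter-wise mean over the checkpoints; this is what --avg 10 does.
+avg = {k: sum(sd[k] for sd in state_dicts) / len(state_dicts)
+       for k in state_dicts[0]}
+
+averaged = nn.Linear(23, 4)
+averaged.load_state_dict(avg)  # decode with this averaged model
+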

Pre-trained Model

+

We have uploaded the pre-trained model to https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_lstm_ctc.

+

The following shows you how to use the pre-trained model.

+
+

Install kaldifeat

+

kaldifeat is used to extract features for a single sound file or multiple sound files at the same time.

+

Please refer to https://github.com/csukuangfj/kaldifeat for installation.

+
+
+
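
As a quick orientation, the sketch below extracts fbank features the way icefall's pretrained.py does. The option names follow that script and kaldifeat's documented usage, but treat the exact values (e.g., num_bins) as assumptions that can differ per recipe:

import torch
+import kaldifeat
+
+opts = kaldifeat.FbankOptions()
+opts.device = torch.device("cpu")
+opts.frame_opts.samp_freq = 16000  # TIMIT waves are 16 kHz (cf. the ffprobe output below)
+opts.frame_opts.dither = 0
+opts.mel_opts.num_bins = 23        # illustrative; check the recipe's feature_dim
+
+fbank = kaldifeat.Fbank(opts)
+
+# A random one-second signal stands in for a real utterance; kaldifeat
+# also accepts a list of 1-D tensors to process several files at once.
+wave = 0.1 * torch.randn(16000)
+features = fbank(wave)
+print(features.shape)  # (num_frames, num_bins)
+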

Download the pre-trained model

+
$ cd egs/timit/ASR
+$ mkdir tmp-lstm
+$ cd tmp-lstm
+$ git lfs install
+$ git clone https://huggingface.co/luomingshuang/icefall_asr_timit_tdnn_lstm_ctc
+
+
+
+

Caution

+

You have to use git lfs to download the pre-trained model.

+
+
+

Caution

+

In order to use this pre-trained model, your k2 version has to be v1.7 or later.

+
+

After downloading, you will have the following files:

+
$ cd egs/timit/ASR
+$ tree tmp-lstm
+
+
+
tmp-lstm/
+`-- icefall_asr_timit_tdnn_lstm_ctc
+    |-- README.md
+    |-- data
+    |   |-- lang_phone
+    |   |   |-- HLG.pt
+    |   |   |-- tokens.txt
+    |   |   `-- words.txt
+    |   `-- lm
+    |       `-- G_4_gram.pt
+    |-- exp
+    |   `-- pretrained_average_16_25.pt
+    `-- test_waves
+        |-- FDHC0_SI1559.WAV
+        |-- FELC0_SI756.WAV
+        |-- FMGD0_SI1564.WAV
+        `-- trans.txt
+
+6 directories, 10 files
+
+
+

File descriptions:

+
+
    +
  • data/lang_phone/HLG.pt

    +
    +

    It is the decoding graph.

    +
    +
  • +
  • data/lang_phone/tokens.txt

    +
    +

    It contains tokens and their IDs.

    +
    +
  • +
  • data/lang_phone/words.txt

    +
    +

    It contains words and their IDs.

    +
    +
  • +
  • data/lm/G_4_gram.pt

    +
    +

    It is a 4-gram LM, useful for LM rescoring.

    +
    +
  • +
  • exp/pretrained_average_16_25.pt

    +
    +

    It contains pre-trained model parameters, obtained by averaging checkpoints from epoch-16.pt to epoch-25.pt. Note: We have removed the optimizer state_dict to reduce file size.

    +
    +
  • +
  • test_waves/*.WAV

    +
    +

    It contains some test sound files from the TIMIT TEST dataset.

    +
    +
  • +
  • test_waves/trans.txt

    +
    +

    It contains the reference transcripts for the sound files in test_waves/.

    +
    +
  • +
+
+

The information of the test sound files is listed below:

+
$ ffprobe -show_format tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV
+
+Input #0, nistsphere, from 'tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV':
+Metadata:
+  database_id     : TIMIT
+  database_version: 1.0
+  utterance_id    : dhc0_si1559
+  sample_min      : -4176
+  sample_max      : 5984
+Duration: 00:00:03.40, bitrate: 258 kb/s
+  Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+$ ffprobe -show_format tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV
+
+Input #0, nistsphere, from 'tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV':
+Metadata:
+  database_id     : TIMIT
+  database_version: 1.0
+  utterance_id    : elc0_si756
+  sample_min      : -1546
+  sample_max      : 1989
+Duration: 00:00:04.19, bitrate: 257 kb/s
+  Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+$ ffprobe -show_format tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV
+
+Input #0, nistsphere, from 'tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV':
+Metadata:
+  database_id     : TIMIT
+  database_version: 1.0
+  utterance_id    : mgd0_si1564
+  sample_min      : -7626
+  sample_max      : 10573
+Duration: 00:00:04.44, bitrate: 257 kb/s
+  Stream #0:0: Audio: pcm_s16le, 16000 Hz, 1 channels, s16, 256 kb/s
+
+
+
+
+

Inference with a pre-trained model

+
$ cd egs/timit/ASR
+$ ./tdnn_lstm_ctc/pretrained.py --help
+
+
+

shows the usage information of ./tdnn_lstm_ctc/pretrained.py.

+

To decode with the 1best method, we can use:

+
./tdnn_lstm_ctc/pretrained.py \
+  --method 1best \
+  --checkpoint ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/exp/pretrained_average_16_25.pt \
+  --words-file ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/words.txt \
+  --HLG ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt \
+  ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV \
+  ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV \
+  ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV
+
+
+

The output is:

+
2021-11-08 21:02:49,583 INFO [pretrained.py:169] device: cuda:0
+2021-11-08 21:02:49,584 INFO [pretrained.py:171] Creating model
+2021-11-08 21:02:53,816 INFO [pretrained.py:183] Loading HLG from ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt
+2021-11-08 21:02:53,827 INFO [pretrained.py:200] Constructing Fbank computer
+2021-11-08 21:02:53,827 INFO [pretrained.py:210] Reading sound files: ['./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV']
+2021-11-08 21:02:53,831 INFO [pretrained.py:216] Decoding started
+2021-11-08 21:02:54,380 INFO [pretrained.py:246] Use HLG decoding
+2021-11-08 21:02:54,387 INFO [pretrained.py:267]
+./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV:
+sil dh ih sh uw ah l iy v iy z ih sil p r aa sil k s ih m ey dx ih sil d w uh dx iy w ih s f iy l iy w ih th ih n ih m s eh l f sil jh
+
+./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV:
+sil dh ih sil t ih r ih s sil s er r ih m ih sil m aa l ih ng sil k l ey sil r eh sil d w ay sil d aa r sil b ah f sil <UNK> jh
+
+./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV:
+sil hh ae z sil b ih n iy w ah z sil b ae n ih sil b ay s sil n ey sil k eh l f eh n s ih z eh n dh eh r w er sil g r ey z ih ng sil k ae dx l sil
+
+
+2021-11-08 21:02:54,387 INFO [pretrained.py:269] Decoding Done
+
+
+

To decode with the whole-lattice-rescoring method, you can use:

+
./tdnn_lstm_ctc/pretrained.py \
+  --method whole-lattice-rescoring \
+  --checkpoint ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/exp/pretrained_average_16_25.pt \
+  --words-file ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/words.txt \
+  --HLG ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt \
+  --G ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lm/G_4_gram.pt \
+  --ngram-lm-scale 0.08 \
+  ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV \
+  ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV \
+  ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV
+
+
+

The decoding output is:

+
2021-11-08 20:05:22,739 INFO [pretrained.py:169] device: cuda:0
+2021-11-08 20:05:22,739 INFO [pretrained.py:171] Creating model
+2021-11-08 20:05:26,959 INFO [pretrained.py:183] Loading HLG from ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lang_phone/HLG.pt
+2021-11-08 20:05:26,971 INFO [pretrained.py:191] Loading G from ./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/data/lm/G_4_gram.pt
+2021-11-08 20:05:26,977 INFO [pretrained.py:200] Constructing Fbank computer
+2021-11-08 20:05:26,978 INFO [pretrained.py:210] Reading sound files: ['./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV', './tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV']
+2021-11-08 20:05:26,981 INFO [pretrained.py:216] Decoding started
+2021-11-08 20:05:27,519 INFO [pretrained.py:251] Use HLG decoding + LM rescoring
+2021-11-08 20:05:27,878 INFO [pretrained.py:267]
+./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FDHC0_SI1559.WAV:
+sil dh ih sh uw l iy v iy z ih sil p r aa sil k s ah m ey dx ih sil w uh dx iy w ih s f iy l ih ng w ih th ih n ih m s eh l f sil jh
+
+./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FELC0_SI756.WAV:
+sil dh ih sil t ih r iy ih s sil s er r eh m ih sil n ah l ih ng sil k l ey sil r eh sil d w ay sil d aa r sil b ow f sil jh
+
+./tmp-lstm/icefall_asr_timit_tdnn_lstm_ctc/test_waves/FMGD0_SI1564.WAV:
+sil hh ah z sil b ih n iy w ah z sil b ae n ih sil b ay s sil n ey sil k ih l f eh n s ih z eh n dh eh r w er sil g r ey z ih n sil k ae dx l sil
+
+
+2021-11-08 20:05:27,878 INFO [pretrained.py:269] Decoding Done

+
+
+
+

Colab notebook

+

We provide a colab notebook for decoding with a pre-trained model.

+

timit tdnn_lstm_ctc colab notebook

+

Congratulations! You have finished the TDNN-LSTM-CTC recipe on TIMIT in icefall.

+
+
\ No newline at end of file
diff --git a/recipes/yesno/index.html b/recipes/yesno/index.html
new file mode 100644
index 000000000..ad9ea643a
--- /dev/null
+++ b/recipes/yesno/index.html
@@ -0,0 +1,123 @@

YesNo

\ No newline at end of file
diff --git a/recipes/yesno/tdnn.html b/recipes/yesno/tdnn.html
new file mode 100644
index 000000000..56b6c047f
--- /dev/null
+++ b/recipes/yesno/tdnn.html
@@ -0,0 +1,575 @@

TDNN-CTC

+

This page shows you how to run the yesno recipe. It contains:

+
+
  1. Prepare data for training
  2. Train a TDNN model
       • View text format logs and visualize TensorBoard logs
       • Select device type, i.e., CPU and GPU, for training
       • Change training options
       • Resume training from a checkpoint
  3. Decode with a trained model
       • Select a checkpoint for decoding
       • Model averaging
  4. Colab notebook
       • It shows you step by step how to set up the environment, how to do training, and how to do decoding
       • How to use a pre-trained model
  5. Inference with a pre-trained model
       • Download a pre-trained model, provided by us
       • Decode a single sound file with a pre-trained model
       • Decode multiple sound files at the same time
+

It does NOT show you:

+
+
  1. How to train with multiple GPUs

     The yesno dataset is so small that CPU is more than enough
     for training as well as for decoding.

  2. How to use LM rescoring for decoding

     The dataset does not have an LM for rescoring.
+
+
+

Hint

+

We assume you have read the page Installation and have set up the environment for icefall.

+
+
+

Hint

+

You don’t need a GPU to run this recipe. It can be run on a CPU. The training part takes less than 30 seconds on a CPU and you will get the following WER at the end (a sketch of how this figure is computed follows the hint):

+
[test_set] %WER 0.42% [1 / 240, 0 ins, 1 del, 0 sub ]
+
+
+
+
+
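
The %WER figure is just edit-distance errors divided by the number of reference words. Below is a small sketch, assuming the edit_distance API of kaldialign (a package installed together with icefall); the hypothesis is made up for illustration:

from kaldialign import edit_distance
+
+ref = "NO NO YES NO YES NO NO YES".split()
+hyp = "NO NO YES NO YES NO NO NO".split()  # one substitution, for illustration
+
+# edit_distance reports ins/del/sub counts; WER = total errors / #ref words.
+stats = edit_distance(ref, hyp)
+wer = 100.0 * stats["total"] / len(ref)
+print(f"%WER {wer:.2f}% [{stats['total']} / {len(ref)}, "
+      f"{stats['ins']} ins, {stats['del']} del, {stats['sub']} sub]")
+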

Data preparation

+
$ cd egs/yesno/ASR
+$ ./prepare.sh
+
+
+

The script ./prepare.sh handles the data preparation for you, automagically. All you need to do is to run it.

+

The data preparation contains several stages. You can use the following two options:

+
+
    +
  • --stage

  • +
  • --stop-stage

  • +
+
+

to control which stage(s) should be run. By default, all stages are executed.

+

For example,

+
$ cd egs/yesno/ASR
+$ ./prepare.sh --stage 0 --stop-stage 0
+
+
+

means to run only stage 0.

+

To run stage 2 to stage 5, use:

+
$ ./prepare.sh --stage 2 --stop-stage 5
+
+
+
+
+

Training

+

We provide only a TDNN model, contained in the tdnn folder, for yesno.

+

The command to run the training part is:

+
$ cd egs/yesno/ASR
+$ export CUDA_VISIBLE_DEVICES=""
+$ ./tdnn/train.py
+
+
+

By default, it will run 15 epochs. Training logs and checkpoints are saved in tdnn/exp.

+

In tdnn/exp, you will find the following files:

+
+
    +
  • epoch-0.pt, epoch-1.pt, …

    +

    These are checkpoint files, containing model state_dict and optimizer state_dict. To resume training from some checkpoint, say epoch-10.pt, you can use:

    +
    +
    $ ./tdnn/train.py --start-epoch 11
    +
    +
    +
    +
  • +
  • tensorboard/

    +

    This folder contains TensorBoard logs. Training loss, validation loss, learning rate, etc., are recorded in these logs. You can visualize them by:

    +
    +
    $ cd tdnn/exp/tensorboard
    +$ tensorboard dev upload --logdir . --description "TDNN training for yesno with icefall"
    +
    +
    +
    +

    It will print something like the following:

    +
    +
    TensorFlow installation not found - running with reduced feature set.
    +Upload started and will continue reading any new data as it's added to the logdir.
    +
    +To stop uploading, press Ctrl-C.
    +
    +New experiment created. View your TensorBoard at: https://tensorboard.dev/experiment/yKUbhb5wRmOSXYkId1z9eg/
    +
    +[2021-08-23T23:49:41] Started scanning logdir.
    +[2021-08-23T23:49:42] Total uploaded: 135 scalars, 0 tensors, 0 binary objects
    +Listening for new data in logdir...
    +
    +
    +
    +

    Note that there is a URL in the above output. Click it and you will see the following screenshot:

    +
    +
    +
    +

    Fig. 5 TensorBoard screenshot.

    +
    +
    +
    +
  • +
  • log/log-train-xxxx

    +

    It is the detailed training log in text format, same as the one you saw printed to the console during training.

    +
  • +
+
+
+

Note

+

By default, ./tdnn/train.py uses GPU 0 for training if GPUs are available. If you have two GPUs, say, GPU 0 and GPU 1, and you want to use GPU 1 for training, you can run:

+
+
$ export CUDA_VISIBLE_DEVICES="1"
+$ ./tdnn/train.py
+
+
+
+

Since the yesno dataset is very small, containing only 30 sound files for training, and the model in use is also very small, we use:

+
+
$ export CUDA_VISIBLE_DEVICES=""
+
+
+
+

so that ./tdnn/train.py uses CPU during training.

+

If you don’t have GPUs, then you don’t need to run export CUDA_VISIBLE_DEVICES="". A sketch of what this device selection amounts to is shown below.

+
+
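
What the CUDA_VISIBLE_DEVICES trick achieves can be seen in a few lines of plain PyTorch; this is a generic sketch, not a quote from train.py. The variable must be set before the first CUDA call in the process:

import os
+
+# Hide all GPUs from this process; must happen before CUDA is initialized.
+os.environ["CUDA_VISIBLE_DEVICES"] = ""
+
+import torch
+
+device = torch.device("cuda", 0) if torch.cuda.is_available() else torch.device("cpu")
+print(device)  # cpu, because no GPU is visible
+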

To see available training options, you can use:

+
$ ./tdnn/train.py --help
+
+
+

Other training options, e.g., learning rate, results dir, etc., are pre-configured in the function get_params() in tdnn/train.py. Normally, you don’t need to change them. You can change them by modifying the code, if you want.

+
+
+

Decoding

+

The decoding part uses checkpoints saved by the training part, so you have to run the training part first.

+

The command for decoding is:

+
$ export CUDA_VISIBLE_DEVICES=""
+$ ./tdnn/decode.py
+
+
+

You will see the WER in the output log.

+

Decoded results are saved in tdnn/exp.

+
$ ./tdnn/decode.py --help
+
+
+

shows you the available decoding options.

+

Some commonly used options are:

+
+
    +
  • --epoch

    +

    You can select which checkpoint to use for decoding. For instance, ./tdnn/decode.py --epoch 10 means to use ./tdnn/exp/epoch-10.pt for decoding.

    +
  • +
  • --avg

    +

    It’s related to model averaging. It specifies the number of checkpoints to average; the averaged model is used for decoding. For example, the following command:

    +
    +
    $ ./tdnn/decode.py --epoch 10 --avg 3
    +
    +
    +
    +

    uses the average of epoch-8.pt, epoch-9.pt and epoch-10.pt for decoding.

    +
  • +
  • --export

    +

    If it is True, i.e., ./tdnn/decode.py --export 1, the code will save the averaged model to tdnn/exp/pretrained.pt. See Pre-trained Model for how to use it.

    +
  • +
+
+
+
+

Pre-trained Model

+

We have uploaded the pre-trained model to https://huggingface.co/csukuangfj/icefall_asr_yesno_tdnn.

+

The following shows you how to use the pre-trained model.

+
+

Download the pre-trained model

+
$ cd egs/yesno/ASR
+$ mkdir tmp
+$ cd tmp
+$ git lfs install
+$ git clone https://huggingface.co/csukuangfj/icefall_asr_yesno_tdnn
+
+
+
+

Caution

+

You have to use git lfs to download the pre-trained model.

+
+

After downloading, you will have the following files:

+
$ cd egs/yesno/ASR
+$ tree tmp
+
+
+
tmp/
+`-- icefall_asr_yesno_tdnn
+    |-- README.md
+    |-- lang_phone
+    |   |-- HLG.pt
+    |   |-- L.pt
+    |   |-- L_disambig.pt
+    |   |-- Linv.pt
+    |   |-- lexicon.txt
+    |   |-- lexicon_disambig.txt
+    |   |-- tokens.txt
+    |   `-- words.txt
+    |-- lm
+    |   |-- G.arpa
+    |   `-- G.fst.txt
+    |-- pretrained.pt
+    `-- test_waves
+        |-- 0_0_0_1_0_0_0_1.wav
+        |-- 0_0_1_0_0_0_1_0.wav
+        |-- 0_0_1_0_0_1_1_1.wav
+        |-- 0_0_1_0_1_0_0_1.wav
+        |-- 0_0_1_1_0_0_0_1.wav
+        |-- 0_0_1_1_0_1_1_0.wav
+        |-- 0_0_1_1_1_0_0_0.wav
+        |-- 0_0_1_1_1_1_0_0.wav
+        |-- 0_1_0_0_0_1_0_0.wav
+        |-- 0_1_0_0_1_0_1_0.wav
+        |-- 0_1_0_1_0_0_0_0.wav
+        |-- 0_1_0_1_1_1_0_0.wav
+        |-- 0_1_1_0_0_1_1_1.wav
+        |-- 0_1_1_1_0_0_1_0.wav
+        |-- 0_1_1_1_1_0_1_0.wav
+        |-- 1_0_0_0_0_0_0_0.wav
+        |-- 1_0_0_0_0_0_1_1.wav
+        |-- 1_0_0_1_0_1_1_1.wav
+        |-- 1_0_1_1_0_1_1_1.wav
+        |-- 1_0_1_1_1_1_0_1.wav
+        |-- 1_1_0_0_0_1_1_1.wav
+        |-- 1_1_0_0_1_0_1_1.wav
+        |-- 1_1_0_1_0_1_0_0.wav
+        |-- 1_1_0_1_1_0_0_1.wav
+        |-- 1_1_0_1_1_1_1_0.wav
+        |-- 1_1_1_0_0_1_0_1.wav
+        |-- 1_1_1_0_1_0_1_0.wav
+        |-- 1_1_1_1_0_0_1_0.wav
+        |-- 1_1_1_1_1_0_0_0.wav
+        `-- 1_1_1_1_1_1_1_1.wav
+
+4 directories, 42 files
+
+
+
$ soxi tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav
+
+Input File     : 'tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav'
+Channels       : 1
+Sample Rate    : 8000
+Precision      : 16-bit
+Duration       : 00:00:06.76 = 54080 samples ~ 507 CDDA sectors
+File Size      : 108k
+Bit Rate       : 128k
+Sample Encoding: 16-bit Signed Integer PCM
+
+
+
    +
  • 0_0_1_0_1_0_0_1.wav

    +
    +

    0 means No; 1 means Yes. No and Yes are not in English, but in Hebrew. So this file contains NO NO YES NO YES NO NO YES (a tiny decoding sketch follows this list).

    +
    +
  • +
+
+
+
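
Since the filename encodes the reference transcript, decoding it takes a few lines of Python; this tiny helper is purely illustrative:

def filename_to_transcript(name):
+    # Each digit is one spoken word: 0 -> NO, 1 -> YES.
+    stem = name.rsplit(".", 1)[0]
+    return " ".join("YES" if d == "1" else "NO" for d in stem.split("_"))
+
+print(filename_to_transcript("0_0_1_0_1_0_0_1.wav"))
+# prints: NO NO YES NO YES NO NO YES
+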

Download kaldifeat

+

kaldifeat is used for extracting features from a single or multiple sound files. Please refer to https://github.com/csukuangfj/kaldifeat to install kaldifeat first.

+
+
+

Inference with a pre-trained model

+
$ cd egs/yesno/ASR
+$ ./tdnn/pretrained.py --help
+
+
+

shows the usage information of ./tdnn/pretrained.py.

+

To decode a single file, we can use:

+
./tdnn/pretrained.py \
+  --checkpoint ./tmp/icefall_asr_yesno_tdnn/pretrained.pt \
+  --words-file ./tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt \
+  --HLG ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt \
+  ./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav
+
+
+

The output is:

+
2021-08-24 12:22:51,621 INFO [pretrained.py:119] {'feature_dim': 23, 'num_classes': 4, 'sample_rate': 8000, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './tmp/icefall_asr_yesno_tdnn/pretrained.pt', 'words_file': './tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt', 'HLG': './tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt', 'sound_files': ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav']}
+2021-08-24 12:22:51,645 INFO [pretrained.py:125] device: cpu
+2021-08-24 12:22:51,645 INFO [pretrained.py:127] Creating model
+2021-08-24 12:22:51,650 INFO [pretrained.py:139] Loading HLG from ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt
+2021-08-24 12:22:51,651 INFO [pretrained.py:143] Constructing Fbank computer
+2021-08-24 12:22:51,652 INFO [pretrained.py:153] Reading sound files: ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav']
+2021-08-24 12:22:51,684 INFO [pretrained.py:159] Decoding started
+2021-08-24 12:22:51,708 INFO [pretrained.py:198]
+./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav:
+NO NO YES NO YES NO NO YES
+
+
+2021-08-24 12:22:51,708 INFO [pretrained.py:200] Decoding Done
+
+
+

You can see that for the sound file 0_0_1_0_1_0_0_1.wav, the decoding result is NO NO YES NO YES NO NO YES.

+

To decode multiple files at the same time, you can use:

+
./tdnn/pretrained.py \
+  --checkpoint ./tmp/icefall_asr_yesno_tdnn/pretrained.pt \
+  --words-file ./tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt \
+  --HLG ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt \
+  ./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav \
+  ./tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav
+
+
+

The decoding output is:

+
2021-08-24 12:25:20,159 INFO [pretrained.py:119] {'feature_dim': 23, 'num_classes': 4, 'sample_rate': 8000, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'checkpoint': './tmp/icefall_asr_yesno_tdnn/pretrained.pt', 'words_file': './tmp/icefall_asr_yesno_tdnn/lang_phone/words.txt', 'HLG': './tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt', 'sound_files': ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav', './tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav']}
+2021-08-24 12:25:20,181 INFO [pretrained.py:125] device: cpu
+2021-08-24 12:25:20,181 INFO [pretrained.py:127] Creating model
+2021-08-24 12:25:20,185 INFO [pretrained.py:139] Loading HLG from ./tmp/icefall_asr_yesno_tdnn/lang_phone/HLG.pt
+2021-08-24 12:25:20,186 INFO [pretrained.py:143] Constructing Fbank computer
+2021-08-24 12:25:20,187 INFO [pretrained.py:153] Reading sound files: ['./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav',
+'./tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav']
+2021-08-24 12:25:20,213 INFO [pretrained.py:159] Decoding started
+2021-08-24 12:25:20,287 INFO [pretrained.py:198]
+./tmp/icefall_asr_yesno_tdnn/test_waves/0_0_1_0_1_0_0_1.wav:
+NO NO YES NO YES NO NO YES
+
+./tmp/icefall_asr_yesno_tdnn/test_waves/1_0_1_1_0_1_1_1.wav:
+YES NO YES YES NO YES YES YES
+
+2021-08-24 12:25:20,287 INFO [pretrained.py:200] Decoding Done
+
+
+

You can see again that it decodes correctly.

+
+
+
+

Colab notebook

+

We do provide a colab notebook for this recipe.

+

yesno colab notebook

+

Congratulations! You have finished the simplest speech recognition recipe in icefall.

+
+
\ No newline at end of file
diff --git a/search.html b/search.html
new file mode 100644
index 000000000..a66167297
--- /dev/null
+++ b/search.html
@@ -0,0 +1,119 @@
\ No newline at end of file
diff --git a/searchindex.js b/searchindex.js
new file mode 100644
index 000000000..51d4ef785
--- /dev/null
+++ b/searchindex.js
@@ -0,0 +1 @@
"1221": [14, 16], "135766": [14, 16], "0002": [14, 16], "reproduc": 14, "our": 14, "106000": [14, 16], "496": [14, 16], "875": [14, 16], "212k": 14, "267440": [14, 16], "1253": [14, 16], "535k": 14, "83": [14, 16], "77200": [14, 16], "361": [14, 16], "154k": 14, "554": 14, "260": 14, "bpe_model": 14, "7178d67e594bc7fa89c2b331ad7bd1c62a6a9eb4": 14, "tue": 14, "oct": 14, "22": [14, 18, 19, 21], "8d93169": 14, "266": [14, 16], "268": [14, 16], "600": 14, "601": 14, "758": 14, "025": 14, "204": 14, "425": 14, "earli": [14, 16], "nightfal": [14, 16], "THE": [14, 16], "yellow": [14, 16], "lamp": [14, 16], "light": [14, 16], "AND": [14, 16], "THERE": [14, 16], "squalid": [14, 16], "quarter": [14, 16], "OF": [14, 16], "broffel": 14, "god": [14, 16], "AS": [14, 16], "direct": [14, 16], "consequ": [14, 16], "sin": [14, 16], "man": [14, 16], "punish": [14, 16], "had": [14, 16], "her": [14, 16], "love": [14, 16], "child": [14, 16], "ON": [14, 16], "THAT": [14, 16], "dishonor": [14, 16], "osom": 14, "TO": [14, 16], "parent": [14, 16], "forev": [14, 16], "WITH": [14, 16], "race": [14, 16], "descent": [14, 16], "mortal": [14, 16], "BE": [14, 16], "bless": [14, 16], "soul": [14, 16], "IN": [14, 16], "heaven": [14, 16], "yet": [14, 16], "THESE": [14, 16], "thought": [14, 16], "affect": [14, 16], "hester": [14, 16], "prynn": [14, 16], "less": [14, 16, 21], "apprehens": [14, 16], "427": 14, "723": 14, "775": 14, "881": 14, "951": 14, "352": 14, "234": 14, "384": 14, "brothel": [14, 16], "bosom": [14, 16], "whole": [14, 16, 18, 19], "ngram": [14, 16, 18, 19], "Its": 14, "857": 14, "979": 14, "980": 14, "055": 14, "117": 14, "051": 14, "363": 14, "959": [14, 19], "546": 14, "598": 14, "599": [14, 16], "833": 14, "834": 14, "915": 14, "076": 14, "110": 14, "329": 14, "397": 14, "999": 14, "concaten": 14, "bucket": 14, "sampler": 14, "1000": 14, "ctc_decod": 14, "ngram_lm_rescor": 14, "attention_rescor": 14, "kind": 14, "316": 14, "118": 14, "58": 14, "221": 14, "125": [14, 21], "136": 14, "144": 14, "159": [14, 21], "543": 14, "174": 14, "topo": 14, "547": 14, "729": 14, "111": 14, "702": 14, "703": 14, "545": 14, "122": 14, "280": 14, "135": [14, 21], "153": [14, 21], "945": 14, "475": 14, "191": [14, 18, 19], "398": 14, "199": [14, 16], "515": 14, "205": 14, "w": [14, 18, 19], "deseri": 14, "441": 14, "fsaclass": 14, "loadfsa": 14, "const": 14, "string": 14, "c10": 14, "ignor": 14, "non": 14, "attribut": 14, "dummi": 14, "589": 14, "attention_scal": 14, "656": 14, "162": 14, "169": [14, 18, 19], "188": 14, "624": 14, "519": [14, 19], "632": 14, "645": [14, 21], "243": 14, "970": 14, "303": 14, "179": 14, "icefall_asr_librispeech_tdnn": 16, "lstm_ctc": 16, "flac": 16, "116k": 16, "140k": 16, "343k": 16, "164k": 16, "105k": 16, "174k": 16, "usag": [16, 18, 19, 21], "pretraind": 16, "168": 16, "170": 16, "581": 16, "584": [16, 19], "209": 16, "791": 16, "245": 16, "099": 16, "methond": [16, 18, 19], "725": 16, "403": 16, "631": 16, "010": 16, "tdnn_ligru_ctc": 18, "enough": [18, 19, 21], "luomingshuang": [18, 19], "icefall_asr_timit_tdnn_ligru_ctc": 18, "pretrained_average_9_25": 18, "fdhc0_si1559": [18, 19], "felc0_si756": [18, 19], "fmgd0_si1564": [18, 19], "ffprobe": [18, 19], "show_format": [18, 19], "nistspher": [18, 19], "database_id": [18, 19], "database_vers": [18, 19], "utterance_id": [18, 19], "dhc0_si1559": [18, 19], "sample_min": [18, 19], "4176": [18, 19], "sample_max": [18, 19], "5984": [18, 19], "bitrat": [18, 19], "258": [18, 19], "audio": [18, 19], "pcm_s16le": [18, 19], "hz": [18, 19], "s16": 
[18, 19], "256": [18, 19], "elc0_si756": [18, 19], "1546": [18, 19], "1989": [18, 19], "mgd0_si1564": [18, 19], "7626": [18, 19], "10573": [18, 19], "660": 18, "183": [18, 19], "695": 18, "697": 18, "210": [18, 19], "829": 18, "sil": [18, 19], "dh": [18, 19], "ih": [18, 19], "uw": [18, 19], "ah": [18, 19], "ii": [18, 19], "z": [18, 19], "aa": [18, 19], "ei": [18, 19], "dx": [18, 19], "uh": [18, 19], "ng": [18, 19], "th": [18, 19], "eh": [18, 19], "jh": [18, 19], "er": [18, 19], "ai": [18, 19], "hh": [18, 19], "aw": 18, "ae": [18, 19], "705": 18, "715": 18, "720": 18, "251": [18, 19], "348": 18, "ch": 18, "icefall_asr_timit_tdnn_lstm_ctc": 19, "pretrained_average_16_25": 19, "816": 19, "827": 19, "387": 19, "unk": 19, "739": 19, "971": 19, "977": 19, "978": 19, "981": 19, "ow": 19, "ykubhb5wrmosxykid1z9eg": 21, "23t23": 21, "sinc": 21, "icefall_asr_yesno_tdnn": 21, "l_disambig": 21, "lexicon_disambig": 21, "arpa": 21, "0_0_0_1_0_0_0_1": 21, "0_0_1_0_0_0_1_0": 21, "0_0_1_0_0_1_1_1": 21, "0_0_1_0_1_0_0_1": 21, "0_0_1_1_0_0_0_1": 21, "0_0_1_1_0_1_1_0": 21, "0_0_1_1_1_0_0_0": 21, "0_0_1_1_1_1_0_0": 21, "0_1_0_0_0_1_0_0": 21, "0_1_0_0_1_0_1_0": 21, "0_1_0_1_0_0_0_0": 21, "0_1_0_1_1_1_0_0": 21, "0_1_1_0_0_1_1_1": 21, "0_1_1_1_0_0_1_0": 21, "0_1_1_1_1_0_1_0": 21, "1_0_0_0_0_0_0_0": 21, "1_0_0_0_0_0_1_1": 21, "1_0_0_1_0_1_1_1": 21, "1_0_1_1_0_1_1_1": 21, "1_0_1_1_1_1_0_1": 21, "1_1_0_0_0_1_1_1": 21, "1_1_0_0_1_0_1_1": 21, "1_1_0_1_0_1_0_0": 21, "1_1_0_1_1_0_0_1": 21, "1_1_0_1_1_1_1_0": 21, "1_1_1_0_0_1_0_1": 21, "1_1_1_0_1_0_1_0": 21, "1_1_1_1_0_0_1_0": 21, "1_1_1_1_1_0_0_0": 21, "1_1_1_1_1_1_1_1": 21, "54080": 21, "507": 21, "108k": 21, "No": 21, "ye": 21, "hebrew": 21, "NO": 21, "621": 21, "119": 21, "127": 21, "650": 21, "139": 21, "143": 21, "198": 21, "181": 21, "186": 21, "187": 21, "287": 21, "correctli": 21, "simplest": 21}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"follow": 0, "code": 0, "style": 0, "contribut": [1, 3], "document": 1, "how": 2, "creat": [2, 8], "recip": [2, 13], "data": [2, 8, 9, 11, 12, 14, 16, 18, 19, 21], "prepar": [2, 8, 9, 11, 12, 14, 16, 18, 19, 21], "train": [2, 5, 8, 9, 11, 12, 14, 16, 18, 19, 21], "decod": [2, 8, 9, 11, 12, 14, 16, 18, 19, 21], "pre": [2, 5, 9, 11, 12, 14, 16, 18, 19, 21], "model": [2, 5, 9, 11, 12, 14, 16, 18, 19, 21], "huggingfac": [4, 6], "space": 6, "youtub": [6, 8], "video": [6, 8], "icefal": [7, 8], "content": [7, 13], "instal": [8, 9, 11, 12, 14, 16, 18, 19], "0": 8, "pytorch": 8, "torchaudio": 8, "1": [8, 9, 11, 12, 14], "k2": 8, "2": [8, 9, 11, 12, 14], "lhots": 8, "3": [8, 9, 11, 14], "download": [8, 9, 11, 12, 14, 16, 18, 19, 21], "exampl": [8, 9, 11, 12, 14], "virtual": 8, "environ": 8, "activ": 8, "your": 8, "4": 8, "5": 8, "test": 8, "conform": [9, 14], "ctc": [9, 12, 14, 16, 18, 19, 21], "configur": [9, 12, 14], "option": [9, 12, 14], "log": [9, 11, 12, 14], "usag": [9, 11, 12, 14], "case": [9, 11, 12, 14], "kaldifeat": [9, 11, 12, 14, 16, 18, 19, 21], "hlg": [9, 12, 14], "attent": [9, 14], "rescor": [9, 14], "colab": [9, 11, 12, 14, 16, 18, 19, 21], "notebook": [9, 11, 12, 14, 16, 18, 19, 21], "deploy": [9, 14], "c": [9, 14], "aishel": 10, "stateless": 11, "transduc": 11, "The": 11, "loss": 11, "todo": 11, "greedi": 11, "search": 11, "beam": 11, "modifi": 11, "tdnn": [12, 16, 18, 19, 21], "lstm": [12, 16, 19], "tabl": 13, "lm": 14, "comput": 14, "wer": 14, "librispeech": 15, "infer": [16, 18, 19, 21], "timit": 17, "ligru": 18, "yesno": 20}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, 
"sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.todo": 2, "sphinx": 56}}) \ No newline at end of file