<span id="install-icefall"></span><h1>Installation<a class="headerlink" href="#installation" title="Permalink to this heading"></a></h1>
|
||
<div class="admonition hint">
|
||
<p class="admonition-title">Hint</p>
|
||
<p>We also provide <a class="reference internal" href="../docker/index.html#icefall-docker"><span class="std std-ref">Docker</span></a> support, which has already setup
|
||
the environment for you.</p>
|
||
</div>
|
||
<div class="admonition hint">
|
||
<p class="admonition-title">Hint</p>
|
||
<p>We have a colab notebook guiding you step by step to setup the environment.</p>
|
||
<p><a class="reference external" href="https://colab.research.google.com/drive/1tIjjzaJc3IvGyKiMCDWO-TSnBgkcuN3B?usp=sharing"><img alt="yesno colab notebook" src="https://colab.research.google.com/assets/colab-badge.svg" /></a></p>
|
||
</div>
|
||
<p><a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a> depends on <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a> and <a class="reference external" href="https://github.com/lhotse-speech/lhotse">lhotse</a>.</p>
|
||
<p>We recommend that you use the following steps to install the dependencies.</p>
|
||
<ul class="simple">
|
||
<li><ol class="arabic simple" start="0">
|
||
<li><p>Install CUDA toolkit and cuDNN</p></li>
|
||
</ol>
|
||
</li>
|
||
<li><ol class="arabic simple">
|
||
<li><p>Install <a class="reference external" href="https://github.com/pytorch/pytorch">torch</a> and <a class="reference external" href="https://github.com/pytorch/audio">torchaudio</a></p></li>
|
||
</ol>
|
||
</li>
|
||
<li><ol class="arabic simple" start="2">
|
||
<li><p>Install <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a></p></li>
|
||
</ol>
|
||
</li>
|
||
<li><ol class="arabic simple" start="3">
|
||
<li><p>Install <a class="reference external" href="https://github.com/lhotse-speech/lhotse">lhotse</a></p></li>
|
||
</ol>
|
||
</li>
|
||
</ul>
|
||
<div class="admonition caution">
|
||
<p class="admonition-title">Caution</p>
|
||
<p>Installation order matters.</p>
|
||
</div>
|
||
<section id="install-cuda-toolkit-and-cudnn">
|
||
<h2>(0) Install CUDA toolkit and cuDNN<a class="headerlink" href="#install-cuda-toolkit-and-cudnn" title="Permalink to this heading"></a></h2>
|
||
<p>Please refer to
|
||
<a class="reference external" href="https://k2-fsa.github.io/k2/installation/cuda-cudnn.html">https://k2-fsa.github.io/k2/installation/cuda-cudnn.html</a>
|
||
to install CUDA and cuDNN.</p>
|
||
</section>
|
||
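The guide linked above typically leaves the toolkit under a versioned prefix. As a minimal sketch only (the `/usr/local/cuda-11.6` path is an assumption, not something stated on this page), you would then make the toolkit visible to your shell like this:

```bash
# Sketch only: adjust the prefix to wherever your CUDA toolkit actually lives.
export PATH=/usr/local/cuda-11.6/bin:$PATH
export LD_LIBRARY_PATH=/usr/local/cuda-11.6/lib64:$LD_LIBRARY_PATH
```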
<section id="install-torch-and-torchaudio">
|
||
<h2>(1) Install torch and torchaudio<a class="headerlink" href="#install-torch-and-torchaudio" title="Permalink to this heading"></a></h2>
|
||
<p>Please refer <a class="reference external" href="https://pytorch.org/">https://pytorch.org/</a> to install <a class="reference external" href="https://github.com/pytorch/pytorch">torch</a> and <a class="reference external" href="https://github.com/pytorch/audio">torchaudio</a>.</p>
|
||
<div class="admonition caution">
|
||
<p class="admonition-title">Caution</p>
|
||
<p>Please install torch and torchaudio at the same time.</p>
|
||
</div>
|
||
</section>
|
||
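For reference, the installation example later on this page installs a CUDA 11.6 build with the command below; the exact version strings to use depend on your CUDA toolkit and Python version:

```bash
# Taken from the installation example further down this page; adjust the
# versions to match your own CUDA toolkit and Python.
pip install torch==1.13.0+cu116 torchaudio==0.13.0+cu116 -f https://download.pytorch.org/whl/torch_stable.html
```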
<section id="install-k2">
|
||
<h2>(2) Install k2<a class="headerlink" href="#install-k2" title="Permalink to this heading"></a></h2>
|
||
<p>Please refer to <a class="reference external" href="https://k2-fsa.github.io/k2/installation/index.html">https://k2-fsa.github.io/k2/installation/index.html</a>
|
||
to install <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a>.</p>
|
||
<div class="admonition caution">
|
||
<p class="admonition-title">Caution</p>
|
||
<p>Please don’t change your installed PyTorch after you have installed k2.</p>
|
||
</div>
|
||
<div class="admonition note">
|
||
<p class="admonition-title">Note</p>
|
||
<p>We suggest that you install k2 from pre-compiled wheels by following
|
||
<a class="reference external" href="https://k2-fsa.github.io/k2/installation/from_wheels.html">https://k2-fsa.github.io/k2/installation/from_wheels.html</a></p>
|
||
</div>
|
||
<div class="admonition hint">
|
||
<p class="admonition-title">Hint</p>
|
||
<p>Please always install the latest version of <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a>.</p>
|
||
</div>
|
||
</section>
|
||
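For reference, the installation example later on this page installs k2 from a pre-compiled wheel with the command below; the wheel name encodes the CUDA and torch versions, so pick the one that matches your environment from https://k2-fsa.github.io/k2/cuda.html:

```bash
# Taken from the installation example further down this page; the k2 version
# string must match your CUDA toolkit and torch versions.
pip install k2==1.24.3.dev20230725+cuda11.6.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html
```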
<section id="install-lhotse">
|
||
<h2>(3) Install lhotse<a class="headerlink" href="#install-lhotse" title="Permalink to this heading"></a></h2>
|
||
<p>Please refer to <a class="reference external" href="https://lhotse.readthedocs.io/en/latest/getting-started.html#installation">https://lhotse.readthedocs.io/en/latest/getting-started.html#installation</a>
|
||
to install <a class="reference external" href="https://github.com/lhotse-speech/lhotse">lhotse</a>.</p>
|
||
<div class="admonition hint">
|
||
<p class="admonition-title">Hint</p>
|
||
<p>We strongly recommend you to use:</p>
|
||
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">pip</span> <span class="n">install</span> <span class="n">git</span><span class="o">+</span><span class="n">https</span><span class="p">:</span><span class="o">//</span><span class="n">github</span><span class="o">.</span><span class="n">com</span><span class="o">/</span><span class="n">lhotse</span><span class="o">-</span><span class="n">speech</span><span class="o">/</span><span class="n">lhotse</span>
|
||
</pre></div>
|
||
</div>
|
||
<p>to install the latest version of <a class="reference external" href="https://github.com/lhotse-speech/lhotse">lhotse</a>.</p>
|
||
</div>
|
||
</section>
|
||
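After the install finishes, a quick way to confirm which lhotse you ended up with is the same version check used in the installation example below:

```bash
# Same check as in the installation example further down this page.
python3 -c "import lhotse; print(lhotse.__version__)"
```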
<section id="download-icefall">
|
||
<h2>(4) Download icefall<a class="headerlink" href="#download-icefall" title="Permalink to this heading"></a></h2>
|
||
<p><a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a> is a collection of Python scripts; what you need is to download it
|
||
and set the environment variable <code class="docutils literal notranslate"><span class="pre">PYTHONPATH</span></code> to point to it.</p>
|
||
<p>Assume you want to place <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a> in the folder <code class="docutils literal notranslate"><span class="pre">/tmp</span></code>. The
|
||
following commands show you how to setup <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a>:</p>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="nb">cd</span><span class="w"> </span>/tmp
|
||
git<span class="w"> </span>clone<span class="w"> </span>https://github.com/k2-fsa/icefall
|
||
<span class="nb">cd</span><span class="w"> </span>icefall
|
||
pip<span class="w"> </span>install<span class="w"> </span>-r<span class="w"> </span>requirements.txt
|
||
<span class="nb">export</span><span class="w"> </span><span class="nv">PYTHONPATH</span><span class="o">=</span>/tmp/icefall:<span class="nv">$PYTHONPATH</span>
|
||
</pre></div>
|
||
</div>
|
||
<div class="admonition hint">
|
||
<p class="admonition-title">Hint</p>
|
||
<p>You can put several versions of <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a> in the same virtual environment.
|
||
To switch among different versions of <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a>, just set <code class="docutils literal notranslate"><span class="pre">PYTHONPATH</span></code>
|
||
to point to the version you want.</p>
|
||
</div>
|
||
</section>
|
||
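For example, with two hypothetical checkouts (the paths below are placeholders, not from the original page), switching versions is just a matter of which directory PYTHONPATH points to:

```bash
# Hypothetical paths: two independent icefall checkouts in the same virtualenv.
export PYTHONPATH=/tmp/icefall-stable:$PYTHONPATH      # use this checkout...
# export PYTHONPATH=/tmp/icefall-dev:$PYTHONPATH       # ...or this one instead
python3 -c "import icefall; print(icefall.__file__)"   # shows which one is active
```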
<section id="installation-example">
|
||
<h2>Installation example<a class="headerlink" href="#installation-example" title="Permalink to this heading"></a></h2>
|
||
<p>The following shows an example about setting up the environment.</p>
|
||
<section id="create-a-virtual-environment">
|
||
<h3>(1) Create a virtual environment<a class="headerlink" href="#create-a-virtual-environment" title="Permalink to this heading"></a></h3>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>kuangfangjun:~$<span class="w"> </span>virtualenv<span class="w"> </span>-p<span class="w"> </span>python3.8<span class="w"> </span>test-icefall
|
||
created<span class="w"> </span>virtual<span class="w"> </span>environment<span class="w"> </span>CPython3.8.0.final.0-64<span class="w"> </span><span class="k">in</span><span class="w"> </span>9422ms
|
||
<span class="w"> </span>creator<span class="w"> </span>CPython3Posix<span class="o">(</span><span class="nv">dest</span><span class="o">=</span>/star-fj/fangjun/test-icefall,<span class="w"> </span><span class="nv">clear</span><span class="o">=</span>False,<span class="w"> </span><span class="nv">no_vcs_ignore</span><span class="o">=</span>False,<span class="w"> </span><span class="nv">global</span><span class="o">=</span>False<span class="o">)</span>
|
||
<span class="w"> </span>seeder<span class="w"> </span>FromAppData<span class="o">(</span><span class="nv">download</span><span class="o">=</span>False,<span class="w"> </span><span class="nv">pip</span><span class="o">=</span>bundle,<span class="w"> </span><span class="nv">setuptools</span><span class="o">=</span>bundle,<span class="w"> </span><span class="nv">wheel</span><span class="o">=</span>bundle,<span class="w"> </span><span class="nv">via</span><span class="o">=</span>copy,<span class="w"> </span><span class="nv">app_data_dir</span><span class="o">=</span>/star-fj/fangjun/.local/share/virtualenv<span class="o">)</span>
|
||
<span class="w"> </span>added<span class="w"> </span>seed<span class="w"> </span>packages:<span class="w"> </span><span class="nv">pip</span><span class="o">==</span><span class="m">22</span>.3.1,<span class="w"> </span><span class="nv">setuptools</span><span class="o">==</span><span class="m">65</span>.6.3,<span class="w"> </span><span class="nv">wheel</span><span class="o">==</span><span class="m">0</span>.38.4
|
||
<span class="w"> </span>activators<span class="w"> </span>BashActivator,CShellActivator,FishActivator,NushellActivator,PowerShellActivator,PythonActivator
|
||
|
||
kuangfangjun:~$<span class="w"> </span><span class="nb">source</span><span class="w"> </span>test-icefall/bin/activate
|
||
|
||
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$
|
||
</pre></div>
|
||
</div>
|
||
</section>
|
||
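A side note, not from the original page: if virtualenv is not available on your machine, Python's built-in venv module creates an equivalent environment; a minimal sketch:

```bash
# Sketch (assumes python3.8 is on PATH); venv ships with Python itself.
python3.8 -m venv test-icefall
source test-icefall/bin/activate
```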
<section id="id1">
|
||
<h3>(2) Install CUDA toolkit and cuDNN<a class="headerlink" href="#id1" title="Permalink to this heading"></a></h3>
|
||
<p>You need to determine the version of CUDA toolkit to install.</p>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>nvidia-smi<span class="w"> </span><span class="p">|</span><span class="w"> </span>head<span class="w"> </span>-n<span class="w"> </span><span class="m">4</span>
|
||
|
||
Wed<span class="w"> </span>Jul<span class="w"> </span><span class="m">26</span><span class="w"> </span><span class="m">21</span>:57:49<span class="w"> </span><span class="m">2023</span>
|
||
+-----------------------------------------------------------------------------+
|
||
<span class="p">|</span><span class="w"> </span>NVIDIA-SMI<span class="w"> </span><span class="m">510</span>.47.03<span class="w"> </span>Driver<span class="w"> </span>Version:<span class="w"> </span><span class="m">510</span>.47.03<span class="w"> </span>CUDA<span class="w"> </span>Version:<span class="w"> </span><span class="m">11</span>.6<span class="w"> </span><span class="p">|</span>
|
||
<span class="p">|</span>-------------------------------+----------------------+----------------------+
|
||
</pre></div>
|
||
</div>
|
||
<p>You can choose any CUDA version that is <code class="docutils literal notranslate"><span class="pre">not</span></code> greater than the version printed by <code class="docutils literal notranslate"><span class="pre">nvidia-smi</span></code>.
|
||
In our case, we can choose any version <code class="docutils literal notranslate"><span class="pre"><=</span> <span class="pre">11.6</span></code>.</p>
|
||
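As a minimal sketch (not from the original page), you can print just that number instead of reading it off the banner, assuming the standard nvidia-smi output format:

```bash
# Prints e.g. "CUDA Version: 11.6"; choose a toolkit no newer than this.
nvidia-smi | grep -o 'CUDA Version: [0-9.]*'
```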
We will use CUDA 11.6 in this example. Please follow https://k2-fsa.github.io/k2/installation/cuda-cudnn.html#cuda-11-6 to install the CUDA toolkit and cuDNN if you have not done that before.

After installing the CUDA toolkit, you can use the following command to verify it:

```bash
(test-icefall) kuangfangjun:~$ nvcc --version

nvcc: NVIDIA (R) Cuda compiler driver
Copyright (c) 2005-2019 NVIDIA Corporation
Built on Wed_Oct_23_19:24:38_PDT_2019
Cuda compilation tools, release 10.2, V10.2.89
```
<section id="id2">
|
||
<h3>(3) Install torch and torchaudio<a class="headerlink" href="#id2" title="Permalink to this heading"></a></h3>
|
||
<p>Since we have selected CUDA toolkit <code class="docutils literal notranslate"><span class="pre">11.6</span></code>, we have to install a version of <a class="reference external" href="https://github.com/pytorch/pytorch">torch</a>
|
||
that is compiled against CUDA <code class="docutils literal notranslate"><span class="pre">11.6</span></code>. We select <code class="docutils literal notranslate"><span class="pre">torch</span> <span class="pre">1.13.0+cu116</span></code> in this
|
||
example.</p>
|
||
<p>After selecting the version of <a class="reference external" href="https://github.com/pytorch/pytorch">torch</a> to install, we need to also install
|
||
a compatible version of <a class="reference external" href="https://github.com/pytorch/audio">torchaudio</a>, which is <code class="docutils literal notranslate"><span class="pre">0.13.0+cu116</span></code> in our case.</p>
|
||
<p>Please refer to <a class="reference external" href="https://pytorch.org/audio/stable/installation.html#compatibility-matrix">https://pytorch.org/audio/stable/installation.html#compatibility-matrix</a>
|
||
to select an appropriate version of <a class="reference external" href="https://github.com/pytorch/audio">torchaudio</a> to install if you use a different
|
||
version of <a class="reference external" href="https://github.com/pytorch/pytorch">torch</a>.</p>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>pip<span class="w"> </span>install<span class="w"> </span><span class="nv">torch</span><span class="o">==</span><span class="m">1</span>.13.0+cu116<span class="w"> </span><span class="nv">torchaudio</span><span class="o">==</span><span class="m">0</span>.13.0+cu116<span class="w"> </span>-f<span class="w"> </span>https://download.pytorch.org/whl/torch_stable.html
|
||
|
||
Looking<span class="w"> </span><span class="k">in</span><span class="w"> </span>links:<span class="w"> </span>https://download.pytorch.org/whl/torch_stable.html
|
||
Collecting<span class="w"> </span><span class="nv">torch</span><span class="o">==</span><span class="m">1</span>.13.0+cu116
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://download.pytorch.org/whl/cu116/torch-1.13.0%2Bcu116-cp38-cp38-linux_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">1983</span>.0<span class="w"> </span>MB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">2</span>.0/2.0<span class="w"> </span>GB<span class="w"> </span><span class="m">764</span>.4<span class="w"> </span>kB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Collecting<span class="w"> </span><span class="nv">torchaudio</span><span class="o">==</span><span class="m">0</span>.13.0+cu116
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://download.pytorch.org/whl/cu116/torchaudio-0.13.0%2Bcu116-cp38-cp38-linux_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">4</span>.2<span class="w"> </span>MB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">4</span>.2/4.2<span class="w"> </span>MB<span class="w"> </span><span class="m">1</span>.3<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span>typing-extensions<span class="w"> </span><span class="k">in</span><span class="w"> </span>/star-fj/fangjun/test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span><span class="nv">torch</span><span class="o">==</span><span class="m">1</span>.13.0+cu116<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">4</span>.7.1<span class="o">)</span>
|
||
Installing<span class="w"> </span>collected<span class="w"> </span>packages:<span class="w"> </span>torch,<span class="w"> </span>torchaudio
|
||
Successfully<span class="w"> </span>installed<span class="w"> </span>torch-1.13.0+cu116<span class="w"> </span>torchaudio-0.13.0+cu116
|
||
</pre></div>
|
||
</div>
|
||
<p>Verify that <a class="reference external" href="https://github.com/pytorch/pytorch">torch</a> and <a class="reference external" href="https://github.com/pytorch/audio">torchaudio</a> are successfully installed:</p>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>python3<span class="w"> </span>-c<span class="w"> </span><span class="s2">"import torch; print(torch.__version__)"</span>
|
||
|
||
<span class="m">1</span>.13.0+cu116
|
||
|
||
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>python3<span class="w"> </span>-c<span class="w"> </span><span class="s2">"import torchaudio; print(torchaudio.__version__)"</span>
|
||
|
||
<span class="m">0</span>.13.0+cu116
|
||
</pre></div>
|
||
</div>
|
||
</section>
|
||
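As a minimal extra check (not part of the original instructions), you can also confirm that this torch build was compiled with CUDA support and can see a GPU on the current machine:

```bash
# Should print the CUDA version torch was built with (e.g. 11.6) and True.
python3 -c "import torch; print(torch.version.cuda, torch.cuda.is_available())"
```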
<section id="id3">
|
||
<h3>(4) Install k2<a class="headerlink" href="#id3" title="Permalink to this heading"></a></h3>
|
||
<p>We will install <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a> from pre-compiled wheels by following
|
||
<a class="reference external" href="https://k2-fsa.github.io/k2/installation/from_wheels.html">https://k2-fsa.github.io/k2/installation/from_wheels.html</a></p>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>pip<span class="w"> </span>install<span class="w"> </span><span class="nv">k2</span><span class="o">==</span><span class="m">1</span>.24.3.dev20230725+cuda11.6.torch1.13.0<span class="w"> </span>-f<span class="w"> </span>https://k2-fsa.github.io/k2/cuda.html
|
||
|
||
Looking<span class="w"> </span><span class="k">in</span><span class="w"> </span>indexes:<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/simple
|
||
Looking<span class="w"> </span><span class="k">in</span><span class="w"> </span>links:<span class="w"> </span>https://k2-fsa.github.io/k2/cuda.html
|
||
Collecting<span class="w"> </span><span class="nv">k2</span><span class="o">==</span><span class="m">1</span>.24.3.dev20230725+cuda11.6.torch1.13.0
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://huggingface.co/csukuangfj/k2/resolve/main/ubuntu-cuda/k2-1.24.3.dev20230725%2Bcuda11.6.torch1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">104</span>.3<span class="w"> </span>MB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">104</span>.3/104.3<span class="w"> </span>MB<span class="w"> </span><span class="m">5</span>.1<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span><span class="nv">torch</span><span class="o">==</span><span class="m">1</span>.13.0<span class="w"> </span><span class="k">in</span><span class="w"> </span>/star-fj/fangjun/test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span><span class="nv">k2</span><span class="o">==</span><span class="m">1</span>.24.3.dev20230725+cuda11.6.torch1.13.0<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">1</span>.13.0+cu116<span class="o">)</span>
|
||
Collecting<span class="w"> </span>graphviz
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/de/5e/fcbb22c68208d39edff467809d06c9d81d7d27426460ebc598e55130c1aa/graphviz-0.20.1-py3-none-any.whl<span class="w"> </span><span class="o">(</span><span class="m">47</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span>typing-extensions<span class="w"> </span><span class="k">in</span><span class="w"> </span>/star-fj/fangjun/test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span><span class="nv">torch</span><span class="o">==</span><span class="m">1</span>.13.0->k2<span class="o">==</span><span class="m">1</span>.24.3.dev20230725+cuda11.6.torch1.13.0<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">4</span>.7.1<span class="o">)</span>
|
||
Installing<span class="w"> </span>collected<span class="w"> </span>packages:<span class="w"> </span>graphviz,<span class="w"> </span>k2
|
||
Successfully<span class="w"> </span>installed<span class="w"> </span>graphviz-0.20.1<span class="w"> </span>k2-1.24.3.dev20230725+cuda11.6.torch1.13.0
|
||
</pre></div>
|
||
</div>
|
||
<div class="admonition hint">
|
||
<p class="admonition-title">Hint</p>
|
||
<p>Please refer to <a class="reference external" href="https://k2-fsa.github.io/k2/cuda.html">https://k2-fsa.github.io/k2/cuda.html</a> for the available
|
||
pre-compiled wheels about <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a>.</p>
|
||
</div>
|
||
<p>Verify that <a class="reference external" href="https://github.com/k2-fsa/k2">k2</a> has been installed successfully:</p>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>python3<span class="w"> </span>-m<span class="w"> </span>k2.version
|
||
|
||
Collecting<span class="w"> </span>environment<span class="w"> </span>information...
|
||
|
||
k2<span class="w"> </span>version:<span class="w"> </span><span class="m">1</span>.24.3
|
||
Build<span class="w"> </span>type:<span class="w"> </span>Release
|
||
Git<span class="w"> </span>SHA1:<span class="w"> </span>4c05309499a08454997adf500b56dcc629e35ae5
|
||
Git<span class="w"> </span>date:<span class="w"> </span>Tue<span class="w"> </span>Jul<span class="w"> </span><span class="m">25</span><span class="w"> </span><span class="m">16</span>:23:36<span class="w"> </span><span class="m">2023</span>
|
||
Cuda<span class="w"> </span>used<span class="w"> </span>to<span class="w"> </span>build<span class="w"> </span>k2:<span class="w"> </span><span class="m">11</span>.6
|
||
cuDNN<span class="w"> </span>used<span class="w"> </span>to<span class="w"> </span>build<span class="w"> </span>k2:<span class="w"> </span><span class="m">8</span>.3.2
|
||
Python<span class="w"> </span>version<span class="w"> </span>used<span class="w"> </span>to<span class="w"> </span>build<span class="w"> </span>k2:<span class="w"> </span><span class="m">3</span>.8
|
||
OS<span class="w"> </span>used<span class="w"> </span>to<span class="w"> </span>build<span class="w"> </span>k2:<span class="w"> </span>CentOS<span class="w"> </span>Linux<span class="w"> </span>release<span class="w"> </span><span class="m">7</span>.9.2009<span class="w"> </span><span class="o">(</span>Core<span class="o">)</span>
|
||
CMake<span class="w"> </span>version:<span class="w"> </span><span class="m">3</span>.27.0
|
||
GCC<span class="w"> </span>version:<span class="w"> </span><span class="m">9</span>.3.1
|
||
CMAKE_CUDA_FLAGS:<span class="w"> </span>-Wno-deprecated-gpu-targets<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_35,code<span class="o">=</span>sm_35<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_50,code<span class="o">=</span>sm_50<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_60,code<span class="o">=</span>sm_60<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_61,code<span class="o">=</span>sm_61<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_70,code<span class="o">=</span>sm_70<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_75,code<span class="o">=</span>sm_75<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_80,code<span class="o">=</span>sm_80<span class="w"> </span>-lineinfo<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-use_fast_math<span class="w"> </span>-Xptxas<span class="o">=</span>-w<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_86,code<span class="o">=</span>sm_86<span class="w"> </span>-DONNX_NAMESPACE<span class="o">=</span>onnx_c2<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_35,code<span class="o">=</span>sm_35<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_50,code<span class="o">=</span>sm_50<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_52,code<span 
class="o">=</span>sm_52<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_60,code<span class="o">=</span>sm_60<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_61,code<span class="o">=</span>sm_61<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_70,code<span class="o">=</span>sm_70<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_75,code<span class="o">=</span>sm_75<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_80,code<span class="o">=</span>sm_80<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_86,code<span class="o">=</span>sm_86<span class="w"> </span>-gencode<span class="w"> </span><span class="nv">arch</span><span class="o">=</span>compute_86,code<span class="o">=</span>compute_86<span class="w"> </span>-Xcudafe<span class="w"> </span>--diag_suppress<span class="o">=</span>cc_clobber_ignored,--diag_suppress<span class="o">=</span>integer_sign_change,--diag_suppress<span class="o">=</span>useless_using_declaration,--diag_suppress<span class="o">=</span>set_but_not_used,--diag_suppress<span class="o">=</span>field_without_dll_interface,--diag_suppress<span class="o">=</span>base_class_has_different_dll_interface,--diag_suppress<span class="o">=</span>dll_interface_conflict_none_assumed,--diag_suppress<span class="o">=</span>dll_interface_conflict_dllexport_assumed,--diag_suppress<span class="o">=</span>implicit_return_from_non_void_function,--diag_suppress<span class="o">=</span>unsigned_compare_with_zero,--diag_suppress<span class="o">=</span>declared_but_not_referenced,--diag_suppress<span class="o">=</span>bad_friend_decl<span class="w"> </span>--expt-relaxed-constexpr<span class="w"> </span>--expt-extended-lambda<span class="w"> </span>-D_GLIBCXX_USE_CXX11_ABI<span class="o">=</span><span class="m">0</span><span class="w"> </span>--compiler-options<span class="w"> </span>-Wall<span class="w"> </span>--compiler-options<span class="w"> </span>-Wno-strict-overflow<span class="w"> </span>--compiler-options<span class="w"> </span>-Wno-unknown-pragmas
|
||
CMAKE_CXX_FLAGS:<span class="w"> </span>-D_GLIBCXX_USE_CXX11_ABI<span class="o">=</span><span class="m">0</span><span class="w"> </span>-Wno-unused-variable<span class="w"> </span>-Wno-strict-overflow
|
||
PyTorch<span class="w"> </span>version<span class="w"> </span>used<span class="w"> </span>to<span class="w"> </span>build<span class="w"> </span>k2:<span class="w"> </span><span class="m">1</span>.13.0+cu116
|
||
PyTorch<span class="w"> </span>is<span class="w"> </span>using<span class="w"> </span>Cuda:<span class="w"> </span><span class="m">11</span>.6
|
||
NVTX<span class="w"> </span>enabled:<span class="w"> </span>True
|
||
With<span class="w"> </span>CUDA:<span class="w"> </span>True
|
||
Disable<span class="w"> </span>debug:<span class="w"> </span>True
|
||
Sync<span class="w"> </span>kernels<span class="w"> </span>:<span class="w"> </span>False
|
||
Disable<span class="w"> </span>checks:<span class="w"> </span>False
|
||
Max<span class="w"> </span>cpu<span class="w"> </span>memory<span class="w"> </span>allocate:<span class="w"> </span><span class="m">214748364800</span><span class="w"> </span>bytes<span class="w"> </span><span class="o">(</span>or<span class="w"> </span><span class="m">200</span>.0<span class="w"> </span>GB<span class="o">)</span>
|
||
k2<span class="w"> </span>abort:<span class="w"> </span>False
|
||
__file__:<span class="w"> </span>/star-fj/fangjun/test-icefall/lib/python3.8/site-packages/k2/version/version.py
|
||
_k2.__file__:<span class="w"> </span>/star-fj/fangjun/test-icefall/lib/python3.8/site-packages/_k2.cpython-38-x86_64-linux-gnu.so
|
||
</pre></div>
|
||
</div>
|
||
</section>
|
||
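A quick follow-up check (not from the original page): confirm which k2 installation Python resolves to; it should match the `__file__` entry shown in the output above.

```bash
python3 -c "import k2; print(k2.__file__)"
```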
<section id="id5">
|
||
<h3>(5) Install lhotse<a class="headerlink" href="#id5" title="Permalink to this heading"></a></h3>
|
||
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>pip<span class="w"> </span>install<span class="w"> </span>git+https://github.com/lhotse-speech/lhotse
|
||
|
||
Collecting<span class="w"> </span>git+https://github.com/lhotse-speech/lhotse
|
||
<span class="w"> </span>Cloning<span class="w"> </span>https://github.com/lhotse-speech/lhotse<span class="w"> </span>to<span class="w"> </span>/tmp/pip-req-build-vq12fd5i
|
||
<span class="w"> </span>Running<span class="w"> </span><span class="nb">command</span><span class="w"> </span>git<span class="w"> </span>clone<span class="w"> </span>--filter<span class="o">=</span>blob:none<span class="w"> </span>--quiet<span class="w"> </span>https://github.com/lhotse-speech/lhotse<span class="w"> </span>/tmp/pip-req-build-vq12fd5i
|
||
<span class="w"> </span>Resolved<span class="w"> </span>https://github.com/lhotse-speech/lhotse<span class="w"> </span>to<span class="w"> </span>commit<span class="w"> </span>7640d663469b22cd0b36f3246ee9b849cd25e3b7
|
||
<span class="w"> </span>Installing<span class="w"> </span>build<span class="w"> </span>dependencies<span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
<span class="w"> </span>Getting<span class="w"> </span>requirements<span class="w"> </span>to<span class="w"> </span>build<span class="w"> </span>wheel<span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
<span class="w"> </span>Preparing<span class="w"> </span>metadata<span class="w"> </span><span class="o">(</span>pyproject.toml<span class="o">)</span><span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
Collecting<span class="w"> </span>cytoolz><span class="o">=</span><span class="m">0</span>.10.1
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/1e/3b/a7828d575aa17fb7acaf1ced49a3655aa36dad7e16eb7e6a2e4df0dda76f/cytoolz-0.12.2-cp38-cp38-
|
||
manylinux_2_17_x86_64.manylinux2014_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">2</span>.0<span class="w"> </span>MB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">2</span>.0/2.0<span class="w"> </span>MB<span class="w"> </span><span class="m">33</span>.2<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Collecting<span class="w"> </span>pyyaml><span class="o">=</span><span class="m">5</span>.3.1
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/c8/6b/6600ac24725c7388255b2f5add93f91e58a5d7efaf4af244fdbcc11a541b/PyYAML-6.0.1-cp38-cp38-ma
|
||
nylinux_2_17_x86_64.manylinux2014_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">736</span><span class="w"> </span>kB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">736</span>.6/736.6<span class="w"> </span>kB<span class="w"> </span><span class="m">38</span>.6<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Collecting<span class="w"> </span>dataclasses
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/26/2f/1095cdc2868052dd1e64520f7c0d5c8c550ad297e944e641dbf1ffbb9a5d/dataclasses-0.6-py3-none-
|
||
any.whl<span class="w"> </span><span class="o">(</span><span class="m">14</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span>torchaudio<span class="w"> </span><span class="k">in</span><span class="w"> </span>./test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span><span class="nv">lhotse</span><span class="o">==</span><span class="m">1</span>.16.0.dev0+git.7640d66.clean<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">0</span>.13.0+cu116<span class="o">)</span>
|
||
Collecting<span class="w"> </span>lilcom><span class="o">=</span><span class="m">1</span>.1.0
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/a8/65/df0a69c52bd085ca1ad4e5c4c1a5c680e25f9477d8e49316c4ff1e5084a4/lilcom-1.7-cp38-cp38-many
|
||
linux_2_17_x86_64.manylinux2014_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">87</span><span class="w"> </span>kB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">87</span>.1/87.1<span class="w"> </span>kB<span class="w"> </span><span class="m">8</span>.7<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Collecting<span class="w"> </span>tqdm
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/e6/02/a2cff6306177ae6bc73bc0665065de51dfb3b9db7373e122e2735faf0d97/tqdm-4.65.0-py3-none-any
|
||
.whl<span class="w"> </span><span class="o">(</span><span class="m">77</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span>numpy><span class="o">=</span><span class="m">1</span>.18.1<span class="w"> </span><span class="k">in</span><span class="w"> </span>./test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span><span class="nv">lhotse</span><span class="o">==</span><span class="m">1</span>.16.0.dev0+git.7640d66.clean<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">1</span>.24.4<span class="o">)</span>
|
||
Collecting<span class="w"> </span>audioread><span class="o">=</span><span class="m">2</span>.1.9
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/5d/cb/82a002441902dccbe427406785db07af10182245ee639ea9f4d92907c923/audioread-3.0.0.tar.gz<span class="w"> </span><span class="o">(</span>
|
||
<span class="m">377</span><span class="w"> </span>kB<span class="o">)</span>
|
||
<span class="w"> </span>Preparing<span class="w"> </span>metadata<span class="w"> </span><span class="o">(</span>setup.py<span class="o">)</span><span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
Collecting<span class="w"> </span>tabulate><span class="o">=</span><span class="m">0</span>.8.1
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-
|
||
any.whl<span class="w"> </span><span class="o">(</span><span class="m">35</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Collecting<span class="w"> </span>click><span class="o">=</span><span class="m">7</span>.1.1
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/1a/70/e63223f8116931d365993d4a6b7ef653a4d920b41d03de7c59499962821f/click-8.1.6-py3-none-any.
|
||
whl<span class="w"> </span><span class="o">(</span><span class="m">97</span><span class="w"> </span>kB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">97</span>.9/97.9<span class="w"> </span>kB<span class="w"> </span><span class="m">8</span>.4<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Collecting<span class="w"> </span>packaging
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/ab/c3/57f0601a2d4fe15de7a553c00adbc901425661bf048f2a22dfc500caf121/packaging-23.1-py3-none-
|
||
any.whl<span class="w"> </span><span class="o">(</span><span class="m">48</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Collecting<span class="w"> </span>intervaltree><span class="o">=</span><span class="m">3</span>.1.0
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/50/fb/396d568039d21344639db96d940d40eb62befe704ef849b27949ded5c3bb/intervaltree-3.1.0.tar.gz
|
||
<span class="w"> </span><span class="o">(</span><span class="m">32</span><span class="w"> </span>kB<span class="o">)</span>
|
||
<span class="w"> </span>Preparing<span class="w"> </span>metadata<span class="w"> </span><span class="o">(</span>setup.py<span class="o">)</span><span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span>torch<span class="w"> </span><span class="k">in</span><span class="w"> </span>./test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span><span class="nv">lhotse</span><span class="o">==</span><span class="m">1</span>.16.0.dev0+git.7640d66.clean<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">1</span>.13.0+cu116<span class="o">)</span>
|
||
Collecting<span class="w"> </span>SoundFile><span class="o">=</span><span class="m">0</span>.10
|
||
<span class="w"> </span>Downloading<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/ad/bd/0602167a213d9184fc688b1086dc6d374b7ae8c33eccf169f9b50ce6568c/soundfile-0.12.1-py2.py3-
|
||
none-manylinux_2_17_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">1</span>.3<span class="w"> </span>MB<span class="o">)</span>
|
||
<span class="w"> </span>________________________________________<span class="w"> </span><span class="m">1</span>.3/1.3<span class="w"> </span>MB<span class="w"> </span><span class="m">46</span>.5<span class="w"> </span>MB/s<span class="w"> </span>eta<span class="w"> </span><span class="m">0</span>:00:00
|
||
Collecting<span class="w"> </span>toolz><span class="o">=</span><span class="m">0</span>.8.0
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/7f/5c/922a3508f5bda2892be3df86c74f9cf1e01217c2b1f8a0ac4841d903e3e9/toolz-0.12.0-py3-none-any.whl<span class="w"> </span><span class="o">(</span><span class="m">55</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Collecting<span class="w"> </span>sortedcontainers<<span class="m">3</span>.0,><span class="o">=</span><span class="m">2</span>.0
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl<span class="w"> </span><span class="o">(</span><span class="m">29</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Collecting<span class="w"> </span>cffi><span class="o">=</span><span class="m">1</span>.0
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/b7/8b/06f30caa03b5b3ac006de4f93478dbd0239e2a16566d81a106c322dc4f79/cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl<span class="w"> </span><span class="o">(</span><span class="m">442</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Requirement<span class="w"> </span>already<span class="w"> </span>satisfied:<span class="w"> </span>typing-extensions<span class="w"> </span><span class="k">in</span><span class="w"> </span>./test-icefall/lib/python3.8/site-packages<span class="w"> </span><span class="o">(</span>from<span class="w"> </span>torch->lhotse<span class="o">==</span><span class="m">1</span>.16.0.dev0+git.7640d66.clean<span class="o">)</span><span class="w"> </span><span class="o">(</span><span class="m">4</span>.7.1<span class="o">)</span>
|
||
Collecting<span class="w"> </span>pycparser
|
||
<span class="w"> </span>Using<span class="w"> </span>cached<span class="w"> </span>https://pypi.tuna.tsinghua.edu.cn/packages/62/d5/5f610ebe421e85889f2e55e33b7f9a6795bd982198517d912eb1c76e1a53/pycparser-2.21-py2.py3-none-any.whl<span class="w"> </span><span class="o">(</span><span class="m">118</span><span class="w"> </span>kB<span class="o">)</span>
|
||
Building<span class="w"> </span>wheels<span class="w"> </span><span class="k">for</span><span class="w"> </span>collected<span class="w"> </span>packages:<span class="w"> </span>lhotse,<span class="w"> </span>audioread,<span class="w"> </span>intervaltree
|
||
<span class="w"> </span>Building<span class="w"> </span>wheel<span class="w"> </span><span class="k">for</span><span class="w"> </span>lhotse<span class="w"> </span><span class="o">(</span>pyproject.toml<span class="o">)</span><span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
<span class="w"> </span>Created<span class="w"> </span>wheel<span class="w"> </span><span class="k">for</span><span class="w"> </span>lhotse:<span class="w"> </span><span class="nv">filename</span><span class="o">=</span>lhotse-1.16.0.dev0+git.7640d66.clean-py3-none-any.whl<span class="w"> </span><span class="nv">size</span><span class="o">=</span><span class="m">687627</span><span class="w"> </span><span class="nv">sha256</span><span class="o">=</span>cbf0a4d2d0b639b33b91637a4175bc251d6a021a069644ecb1a9f2b3a83d072a
|
||
<span class="w"> </span>Stored<span class="w"> </span><span class="k">in</span><span class="w"> </span>directory:<span class="w"> </span>/tmp/pip-ephem-wheel-cache-wwtk90_m/wheels/7f/7a/8e/a0bf241336e2e3cb573e1e21e5600952d49f5162454f2e612f
|
||
<span class="w"> </span>Building<span class="w"> </span>wheel<span class="w"> </span><span class="k">for</span><span class="w"> </span>audioread<span class="w"> </span><span class="o">(</span>setup.py<span class="o">)</span><span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
<span class="w"> </span>Created<span class="w"> </span>wheel<span class="w"> </span><span class="k">for</span><span class="w"> </span>audioread:<span class="w"> </span><span class="nv">filename</span><span class="o">=</span>audioread-3.0.0-py3-none-any.whl<span class="w"> </span><span class="nv">size</span><span class="o">=</span><span class="m">23704</span><span class="w"> </span><span class="nv">sha256</span><span class="o">=</span>5e2d3537c96ce9cf0f645a654c671163707bf8cb8d9e358d0e2b0939a85ff4c2
|
||
<span class="w"> </span>Stored<span class="w"> </span><span class="k">in</span><span class="w"> </span>directory:<span class="w"> </span>/star-fj/fangjun/.cache/pip/wheels/e2/c3/9c/f19ae5a03f8862d9f0776b0c0570f1fdd60a119d90954e3f39
|
||
<span class="w"> </span>Building<span class="w"> </span>wheel<span class="w"> </span><span class="k">for</span><span class="w"> </span>intervaltree<span class="w"> </span><span class="o">(</span>setup.py<span class="o">)</span><span class="w"> </span>...<span class="w"> </span><span class="k">done</span>
|
||
<span class="w"> </span>Created<span class="w"> </span>wheel<span class="w"> </span><span class="k">for</span><span class="w"> </span>intervaltree:<span class="w"> </span><span class="nv">filename</span><span class="o">=</span>intervaltree-3.1.0-py2.py3-none-any.whl<span class="w"> </span><span class="nv">size</span><span class="o">=</span><span class="m">26098</span><span class="w"> </span><span class="nv">sha256</span><span class="o">=</span>2604170976cfffe0d2f678cb1a6e5b525f561cd50babe53d631a186734fec9f9
|
||
<span class="w"> </span>Stored<span class="w"> </span><span class="k">in</span><span class="w"> </span>directory:<span class="w"> </span>/star-fj/fangjun/.cache/pip/wheels/f3/ed/2b/c179ebfad4e15452d6baef59737f27beb9bfb442e0620f7271
|
||
Successfully<span class="w"> </span>built<span class="w"> </span>lhotse<span class="w"> </span>audioread<span class="w"> </span>intervaltree
|
||
Installing<span class="w"> </span>collected<span class="w"> </span>packages:<span class="w"> </span>sortedcontainers,<span class="w"> </span>dataclasses,<span class="w"> </span>tqdm,<span class="w"> </span>toolz,<span class="w"> </span>tabulate,<span class="w"> </span>pyyaml,<span class="w"> </span>pycparser,<span class="w"> </span>packaging,<span class="w"> </span>lilcom,<span class="w"> </span>intervaltree,<span class="w"> </span>click,<span class="w"> </span>audioread,<span class="w"> </span>cytoolz,<span class="w"> </span>cffi,<span class="w"> </span>SoundFile,<span class="w"> </span>lhotse
|
||
Successfully<span class="w"> </span>installed<span class="w"> </span>SoundFile-0.12.1<span class="w"> </span>audioread-3.0.0<span class="w"> </span>cffi-1.15.1<span class="w"> </span>click-8.1.6<span class="w"> </span>cytoolz-0.12.2<span class="w"> </span>dataclasses-0.6<span class="w"> </span>intervaltree-3.1.0<span class="w"> </span>lhotse-1.16.0.dev0+git.7640d66.clean<span class="w"> </span>lilcom-1.7<span class="w"> </span>packaging-23.1<span class="w"> </span>pycparser-2.21<span class="w"> </span>pyyaml-6.0.1<span class="w"> </span>sortedcontainers-2.4.0<span class="w"> </span>tabulate-0.9.0<span class="w"> </span>toolz-0.12.0<span class="w"> </span>tqdm-4.65.0
</pre></div>
</div>
<p>Verify that <a class="reference external" href="https://github.com/lhotse-speech/lhotse">lhotse</a> has been installed successfully:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span>python3<span class="w"> </span>-c<span class="w"> </span><span class="s2">"import lhotse; print(lhotse.__version__)"</span>
<span class="m">1</span>.16.0.dev+git.7640d66.clean
</pre></div>
</div>
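<p>You can run a similar quick check for <code class="docutils literal notranslate"><span class="pre">torch</span></code> and <code class="docutils literal notranslate"><span class="pre">k2</span></code> in the same virtual environment. This is a minimal sketch; the version strings printed on your machine will differ from the example values shown on this page:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:~$ python3 -c "import torch; print(torch.__version__)"

(test-icefall) kuangfangjun:~$ python3 -m k2.version
</pre></div>
</div>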
</section>
<section id="id6">
<h3>(6) Download icefall<a class="headerlink" href="#id6" title="Permalink to this heading"></a></h3>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:~$<span class="w"> </span><span class="nb">cd</span><span class="w"> </span>/tmp/
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:tmp$<span class="w"> </span>git<span class="w"> </span>clone<span class="w"> </span>https://github.com/k2-fsa/icefall
Cloning<span class="w"> </span>into<span class="w"> </span><span class="s1">'icefall'</span>...
remote:<span class="w"> </span>Enumerating<span class="w"> </span>objects:<span class="w"> </span><span class="m">12942</span>,<span class="w"> </span><span class="k">done</span>.
remote:<span class="w"> </span>Counting<span class="w"> </span>objects:<span class="w"> </span><span class="m">100</span>%<span class="w"> </span><span class="o">(</span><span class="m">67</span>/67<span class="o">)</span>,<span class="w"> </span><span class="k">done</span>.
remote:<span class="w"> </span>Compressing<span class="w"> </span>objects:<span class="w"> </span><span class="m">100</span>%<span class="w"> </span><span class="o">(</span><span class="m">56</span>/56<span class="o">)</span>,<span class="w"> </span><span class="k">done</span>.
remote:<span class="w"> </span>Total<span class="w"> </span><span class="m">12942</span><span class="w"> </span><span class="o">(</span>delta<span class="w"> </span><span class="m">17</span><span class="o">)</span>,<span class="w"> </span>reused<span class="w"> </span><span class="m">35</span><span class="w"> </span><span class="o">(</span>delta<span class="w"> </span><span class="m">6</span><span class="o">)</span>,<span class="w"> </span>pack-reused<span class="w"> </span><span class="m">12875</span>
Receiving<span class="w"> </span>objects:<span class="w"> </span><span class="m">100</span>%<span class="w"> </span><span class="o">(</span><span class="m">12942</span>/12942<span class="o">)</span>,<span class="w"> </span><span class="m">14</span>.77<span class="w"> </span>MiB<span class="w"> </span><span class="p">|</span><span class="w"> </span><span class="m">9</span>.29<span class="w"> </span>MiB/s,<span class="w"> </span><span class="k">done</span>.
Resolving<span class="w"> </span>deltas:<span class="w"> </span><span class="m">100</span>%<span class="w"> </span><span class="o">(</span><span class="m">8835</span>/8835<span class="o">)</span>,<span class="w"> </span><span class="k">done</span>.
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:tmp$<span class="w"> </span><span class="nb">cd</span><span class="w"> </span>icefall/
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:icefall$<span class="w"> </span>pip<span class="w"> </span>install<span class="w"> </span>-r<span class="w"> </span>./requirements.txt
</pre></div>
</div>
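<p>If you only want to try out <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a> and prefer a smaller, faster download, a shallow clone also works. This is a sketch using standard <code class="docutils literal notranslate"><span class="pre">git</span></code> options; use a full clone as shown above if you plan to contribute:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:tmp$ git clone --depth 1 https://github.com/k2-fsa/icefall
</pre></div>
</div>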
</section>
</section>
<section id="test-your-installation">
<h2>Test Your Installation<a class="headerlink" href="#test-your-installation" title="Permalink to this heading"></a></h2>
<p>To test that your installation is successful, let us run
the <a class="reference external" href="https://github.com/k2-fsa/icefall/tree/master/egs/yesno/ASR">yesno recipe</a>
on <code class="docutils literal notranslate"><span class="pre">CPU</span></code>.</p>
<section id="data-preparation">
<h3>Data preparation<a class="headerlink" href="#data-preparation" title="Permalink to this heading"></a></h3>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:icefall$<span class="w"> </span><span class="nb">export</span><span class="w"> </span><span class="nv">PYTHONPATH</span><span class="o">=</span>/tmp/icefall:<span class="nv">$PYTHONPATH</span>
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:icefall$<span class="w"> </span><span class="nb">cd</span><span class="w"> </span>/tmp/icefall
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:icefall$<span class="w"> </span><span class="nb">cd</span><span class="w"> </span>egs/yesno/ASR
<span class="o">(</span>test-icefall<span class="o">)</span><span class="w"> </span>kuangfangjun:ASR$<span class="w"> </span>./prepare.sh
</pre></div>
</div>
<p>The log of running <code class="docutils literal notranslate"><span class="pre">./prepare.sh</span></code> is:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>2023-07-27 12:41:39 (prepare.sh:27:main) dl_dir: /tmp/icefall/egs/yesno/ASR/download
2023-07-27 12:41:39 (prepare.sh:30:main) Stage 0: Download data
/tmp/icefall/egs/yesno/ASR/download/waves_yesno.tar.gz: 100%|___________________________________________________| 4.70M/4.70M [00:00<00:00, 11.1MB/s]
2023-07-27 12:41:46 (prepare.sh:39:main) Stage 1: Prepare yesno manifest
2023-07-27 12:41:50 (prepare.sh:45:main) Stage 2: Compute fbank for yesno
2023-07-27 12:41:55,718 INFO [compute_fbank_yesno.py:65] Processing train
Extracting and storing features: 100%|_______________________________________________________________________________| 90/90 [00:01<00:00, 87.82it/s]
2023-07-27 12:41:56,778 INFO [compute_fbank_yesno.py:65] Processing test
Extracting and storing features: 100%|______________________________________________________________________________| 30/30 [00:00<00:00, 256.92it/s]
2023-07-27 12:41:57 (prepare.sh:51:main) Stage 3: Prepare lang
2023-07-27 12:42:02 (prepare.sh:66:main) Stage 4: Prepare G
/project/kaldilm/csrc/arpa_file_parser.cc:void kaldilm::ArpaFileParser::Read(std::istream&):79
[I] Reading \data\ section.
/project/kaldilm/csrc/arpa_file_parser.cc:void kaldilm::ArpaFileParser::Read(std::istream&):140
[I] Reading \1-grams: section.
2023-07-27 12:42:02 (prepare.sh:92:main) Stage 5: Compile HLG
2023-07-27 12:42:07,275 INFO [compile_hlg.py:124] Processing data/lang_phone
2023-07-27 12:42:07,276 INFO [lexicon.py:171] Converting L.pt to Linv.pt
2023-07-27 12:42:07,309 INFO [compile_hlg.py:48] Building ctc_topo. max_token_id: 3
2023-07-27 12:42:07,310 INFO [compile_hlg.py:52] Loading G.fst.txt
2023-07-27 12:42:07,314 INFO [compile_hlg.py:62] Intersecting L and G
2023-07-27 12:42:07,323 INFO [compile_hlg.py:64] LG shape: (4, None)
2023-07-27 12:42:07,323 INFO [compile_hlg.py:66] Connecting LG
2023-07-27 12:42:07,323 INFO [compile_hlg.py:68] LG shape after k2.connect: (4, None)
2023-07-27 12:42:07,323 INFO [compile_hlg.py:70] <class 'torch.Tensor'>
2023-07-27 12:42:07,323 INFO [compile_hlg.py:71] Determinizing LG
2023-07-27 12:42:07,341 INFO [compile_hlg.py:74] <class '_k2.ragged.RaggedTensor'>
2023-07-27 12:42:07,341 INFO [compile_hlg.py:76] Connecting LG after k2.determinize
2023-07-27 12:42:07,341 INFO [compile_hlg.py:79] Removing disambiguation symbols on LG
2023-07-27 12:42:07,354 INFO [compile_hlg.py:91] LG shape after k2.remove_epsilon: (6, None)
2023-07-27 12:42:07,445 INFO [compile_hlg.py:96] Arc sorting LG
2023-07-27 12:42:07,445 INFO [compile_hlg.py:99] Composing H and LG
2023-07-27 12:42:07,446 INFO [compile_hlg.py:106] Connecting LG
2023-07-27 12:42:07,446 INFO [compile_hlg.py:109] Arc sorting LG
2023-07-27 12:42:07,447 INFO [compile_hlg.py:111] HLG.shape: (8, None)
2023-07-27 12:42:07,447 INFO [compile_hlg.py:127] Saving HLG.pt to data/lang_phone
</pre></div>
</div>
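<p>If you want to double-check that data preparation produced its outputs, you can list the generated directories. This is a sketch; the exact contents may vary slightly between icefall versions:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:ASR$ ls data
# expect subdirectories such as fbank, lang_phone, and lm

(test-icefall) kuangfangjun:ASR$ ls data/lang_phone
# L.pt, Linv.pt, HLG.pt, and related lexicon files should appear here
</pre></div>
</div>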
</section>
<section id="training">
<h3>Training<a class="headerlink" href="#training" title="Permalink to this heading"></a></h3>
<p>Now let us run the training part:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:ASR$ export CUDA_VISIBLE_DEVICES=""
(test-icefall) kuangfangjun:ASR$ ./tdnn/train.py
</pre></div>
</div>
<div class="admonition caution">
<p class="admonition-title">Caution</p>
<p>We use <code class="docutils literal notranslate"><span class="pre">export</span> <span class="pre">CUDA_VISIBLE_DEVICES=""</span></code> so that <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a> uses CPU
even if there are GPUs available.</p>
</div>
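<p>Conversely, if you do have a GPU and want training to use it, point <code class="docutils literal notranslate"><span class="pre">CUDA_VISIBLE_DEVICES</span></code> at one of your devices before launching the script. This is a sketch using the standard CUDA environment variable; device <code class="docutils literal notranslate"><span class="pre">0</span></code> is just an example:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:ASR$ export CUDA_VISIBLE_DEVICES="0"

(test-icefall) kuangfangjun:ASR$ ./tdnn/train.py
</pre></div>
</div>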
<div class="admonition hint">
<p class="admonition-title">Hint</p>
<p>In case you get a <code class="docutils literal notranslate"><span class="pre">Segmentation</span> <span class="pre">fault</span> <span class="pre">(core</span> <span class="pre">dumped)</span></code> error, please use:</p>
<blockquote>
<div><div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="nb">export</span><span class="w"> </span><span class="nv">PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION</span><span class="o">=</span>python
</pre></div>
</div>
</div></blockquote>
<p>See more at <a class="reference external" href="https://github.com/k2-fsa/icefall/issues/674">https://github.com/k2-fsa/icefall/issues/674</a> if you are interested.</p>
</div>
<p>The training log is given below:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>2023-07-27 12:50:51,936 INFO [train.py:481] Training started
2023-07-27 12:50:51,936 INFO [train.py:482] {'exp_dir': PosixPath('tdnn/exp'), 'lang_dir': PosixPath('data/lang_phone'), 'lr': 0.01, 'feature_dim': 23, 'weight_decay': 1e-06, 'start_epoch': 0, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 10, 'reset_interval': 20, 'valid_interval': 10, 'beam_size': 10, 'reduction': 'sum', 'use_double_scores': True, 'world_size': 1, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 15, 'seed': 42, 'feature_dir': PosixPath('data/fbank'), 'max_duration': 30.0, 'bucketing_sampler': False, 'num_buckets': 10, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': False, 'return_cuts': True, 'num_workers': 2, 'env_info': {'k2-version': '1.24.3', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '4c05309499a08454997adf500b56dcc629e35ae5', 'k2-git-date': 'Tue Jul 25 16:23:36 2023', 'lhotse-version': '1.16.0.dev+git.7640d66.clean', 'torch-version': '1.13.0+cu116', 'torch-cuda-available': False, 'torch-cuda-version': '11.6', 'python-version': '3.8', 'icefall-git-branch': 'master', 'icefall-git-sha1': '3fb0a43-clean', 'icefall-git-date': 'Thu Jul 27 12:36:05 2023', 'icefall-path': '/tmp/icefall', 'k2-path': '/star-fj/fangjun/test-icefall/lib/python3.8/site-packages/k2/__init__.py', 'lhotse-path': '/star-fj/fangjun/test-icefall/lib/python3.8/site-packages/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-1-1220091118-57c4d55446-sph26', 'IP address': '10.177.77.20'}}
2023-07-27 12:50:51,941 INFO [lexicon.py:168] Loading pre-compiled data/lang_phone/Linv.pt
2023-07-27 12:50:51,949 INFO [train.py:495] device: cpu
2023-07-27 12:50:51,965 INFO [asr_datamodule.py:146] About to get train cuts
2023-07-27 12:50:51,965 INFO [asr_datamodule.py:244] About to get train cuts
2023-07-27 12:50:51,967 INFO [asr_datamodule.py:149] About to create train dataset
2023-07-27 12:50:51,967 INFO [asr_datamodule.py:199] Using SingleCutSampler.
2023-07-27 12:50:51,967 INFO [asr_datamodule.py:205] About to create train dataloader
2023-07-27 12:50:51,968 INFO [asr_datamodule.py:218] About to get test cuts
2023-07-27 12:50:51,968 INFO [asr_datamodule.py:252] About to get test cuts
2023-07-27 12:50:52,565 INFO [train.py:422] Epoch 0, batch 0, loss[loss=1.065, over 2436.00 frames. ], tot_loss[loss=1.065, over 2436.00 frames. ], batch size: 4
2023-07-27 12:50:53,681 INFO [train.py:422] Epoch 0, batch 10, loss[loss=0.4561, over 2828.00 frames. ], tot_loss[loss=0.7076, over 22192.90 frames.], batch size: 4
2023-07-27 12:50:54,167 INFO [train.py:444] Epoch 0, validation loss=0.9002, over 18067.00 frames.
2023-07-27 12:50:55,011 INFO [train.py:422] Epoch 0, batch 20, loss[loss=0.2555, over 2695.00 frames. ], tot_loss[loss=0.484, over 34971.47 frames. ], batch size: 5
2023-07-27 12:50:55,331 INFO [train.py:444] Epoch 0, validation loss=0.4688, over 18067.00 frames.
2023-07-27 12:50:55,368 INFO [checkpoint.py:75] Saving checkpoint to tdnn/exp/epoch-0.pt
2023-07-27 12:50:55,633 INFO [train.py:422] Epoch 1, batch 0, loss[loss=0.2532, over 2436.00 frames. ], tot_loss[loss=0.2532, over 2436.00 frames. ], batch size: 4
2023-07-27 12:50:56,242 INFO [train.py:422] Epoch 1, batch 10, loss[loss=0.1139, over 2828.00 frames. ], tot_loss[loss=0.1592, over 22192.90 frames.], batch size: 4
2023-07-27 12:50:56,522 INFO [train.py:444] Epoch 1, validation loss=0.1627, over 18067.00 frames.
2023-07-27 12:50:57,209 INFO [train.py:422] Epoch 1, batch 20, loss[loss=0.07055, over 2695.00 frames. ], tot_loss[loss=0.1175, over 34971.47 frames.], batch size: 5
2023-07-27 12:50:57,600 INFO [train.py:444] Epoch 1, validation loss=0.07091, over 18067.00 frames.
2023-07-27 12:50:57,640 INFO [checkpoint.py:75] Saving checkpoint to tdnn/exp/epoch-1.pt
2023-07-27 12:50:57,847 INFO [train.py:422] Epoch 2, batch 0, loss[loss=0.07731, over 2436.00 frames. ], tot_loss[loss=0.07731, over 2436.00 frames.], batch size: 4
2023-07-27 12:50:58,427 INFO [train.py:422] Epoch 2, batch 10, loss[loss=0.04391, over 2828.00 frames. ], tot_loss[loss=0.05341, over 22192.90 frames. ], batch size: 4
2023-07-27 12:50:58,884 INFO [train.py:444] Epoch 2, validation loss=0.04384, over 18067.00 frames.
2023-07-27 12:50:59,387 INFO [train.py:422] Epoch 2, batch 20, loss[loss=0.03458, over 2695.00 frames. ], tot_loss[loss=0.04616, over 34971.47 frames. ], batch size: 5
2023-07-27 12:50:59,707 INFO [train.py:444] Epoch 2, validation loss=0.03379, over 18067.00 frames.
2023-07-27 12:50:59,758 INFO [checkpoint.py:75] Saving checkpoint to tdnn/exp/epoch-2.pt
... ...
2023-07-27 12:51:23,433 INFO [train.py:422] Epoch 13, batch 0, loss[loss=0.01054, over 2436.00 frames. ], tot_loss[loss=0.01054, over 2436.00 frames. ], batch size: 4
2023-07-27 12:51:23,980 INFO [train.py:422] Epoch 13, batch 10, loss[loss=0.009014, over 2828.00 frames. ], tot_loss[loss=0.009974, over 22192.90 frames. ], batch size: 4
2023-07-27 12:51:24,489 INFO [train.py:444] Epoch 13, validation loss=0.01085, over 18067.00 frames.
2023-07-27 12:51:25,258 INFO [train.py:422] Epoch 13, batch 20, loss[loss=0.01172, over 2695.00 frames. ], tot_loss[loss=0.01055, over 34971.47 frames. ], batch size: 5
2023-07-27 12:51:25,621 INFO [train.py:444] Epoch 13, validation loss=0.01074, over 18067.00 frames.
2023-07-27 12:51:25,699 INFO [checkpoint.py:75] Saving checkpoint to tdnn/exp/epoch-13.pt
2023-07-27 12:51:25,866 INFO [train.py:422] Epoch 14, batch 0, loss[loss=0.01044, over 2436.00 frames. ], tot_loss[loss=0.01044, over 2436.00 frames. ], batch size: 4
2023-07-27 12:51:26,844 INFO [train.py:422] Epoch 14, batch 10, loss[loss=0.008942, over 2828.00 frames. ], tot_loss[loss=0.01, over 22192.90 frames. ], batch size: 4
2023-07-27 12:51:27,221 INFO [train.py:444] Epoch 14, validation loss=0.01082, over 18067.00 frames.
2023-07-27 12:51:27,970 INFO [train.py:422] Epoch 14, batch 20, loss[loss=0.01169, over 2695.00 frames. ], tot_loss[loss=0.01054, over 34971.47 frames. ], batch size: 5
2023-07-27 12:51:28,247 INFO [train.py:444] Epoch 14, validation loss=0.01073, over 18067.00 frames.
2023-07-27 12:51:28,323 INFO [checkpoint.py:75] Saving checkpoint to tdnn/exp/epoch-14.pt
2023-07-27 12:51:28,326 INFO [train.py:555] Done!
</pre></div>
</div>
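<p>The log above shows that a checkpoint is saved to <code class="docutils literal notranslate"><span class="pre">tdnn/exp</span></code> at the end of each epoch, and the configuration has <code class="docutils literal notranslate"><span class="pre">'tensorboard': True</span></code>. The following sketch lists the checkpoints and launches TensorBoard, assuming the event files are written under <code class="docutils literal notranslate"><span class="pre">tdnn/exp/tensorboard</span></code>:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:ASR$ ls tdnn/exp/epoch-*.pt

(test-icefall) kuangfangjun:ASR$ tensorboard --logdir tdnn/exp/tensorboard
</pre></div>
</div>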
</section>
<section id="decoding">
<h3>Decoding<a class="headerlink" href="#decoding" title="Permalink to this heading"></a></h3>
<p>Let us use the trained model to decode the test set:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:ASR$ ./tdnn/decode.py
2023-07-27 12:55:12,840 INFO [decode.py:263] Decoding started
2023-07-27 12:55:12,840 INFO [decode.py:264] {'exp_dir': PosixPath('tdnn/exp'), 'lang_dir': PosixPath('data/lang_phone'), 'lm_dir': PosixPath('data/lm'), 'feature_dim': 23, 'search_beam': 20, 'output_beam': 8, 'min_active_states': 30, 'max_active_states': 10000, 'use_double_scores': True, 'epoch': 14, 'avg': 2, 'export': False, 'feature_dir': PosixPath('data/fbank'), 'max_duration': 30.0, 'bucketing_sampler': False, 'num_buckets': 10, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': False, 'return_cuts': True, 'num_workers': 2, 'env_info': {'k2-version': '1.24.3', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '4c05309499a08454997adf500b56dcc629e35ae5', 'k2-git-date': 'Tue Jul 25 16:23:36 2023', 'lhotse-version': '1.16.0.dev+git.7640d66.clean', 'torch-version': '1.13.0+cu116', 'torch-cuda-available': False, 'torch-cuda-version': '11.6', 'python-version': '3.8', 'icefall-git-branch': 'master', 'icefall-git-sha1': '3fb0a43-clean', 'icefall-git-date': 'Thu Jul 27 12:36:05 2023', 'icefall-path': '/tmp/icefall', 'k2-path': '/star-fj/fangjun/test-icefall/lib/python3.8/site-packages/k2/__init__.py', 'lhotse-path': '/star-fj/fangjun/test-icefall/lib/python3.8/site-packages/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-1-1220091118-57c4d55446-sph26', 'IP address': '10.177.77.20'}}
2023-07-27 12:55:12,841 INFO [lexicon.py:168] Loading pre-compiled data/lang_phone/Linv.pt
2023-07-27 12:55:12,855 INFO [decode.py:273] device: cpu
2023-07-27 12:55:12,868 INFO [decode.py:291] averaging ['tdnn/exp/epoch-13.pt', 'tdnn/exp/epoch-14.pt']
2023-07-27 12:55:12,882 INFO [asr_datamodule.py:218] About to get test cuts
2023-07-27 12:55:12,883 INFO [asr_datamodule.py:252] About to get test cuts
2023-07-27 12:55:13,157 INFO [decode.py:204] batch 0/?, cuts processed until now is 4
2023-07-27 12:55:13,701 INFO [decode.py:241] The transcripts are stored in tdnn/exp/recogs-test_set.txt
2023-07-27 12:55:13,702 INFO [utils.py:564] [test_set] %WER 0.42% [1 / 240, 0 ins, 1 del, 0 sub ]
2023-07-27 12:55:13,704 INFO [decode.py:249] Wrote detailed error stats to tdnn/exp/errs-test_set.txt
2023-07-27 12:55:13,704 INFO [decode.py:316] Done!
</pre></div>
</div>
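<p>The decoding log reports a WER of 0.42% and tells you where the outputs went: the transcripts are stored in <code class="docutils literal notranslate"><span class="pre">tdnn/exp/recogs-test_set.txt</span></code> and the detailed error statistics in <code class="docutils literal notranslate"><span class="pre">tdnn/exp/errs-test_set.txt</span></code>. You can inspect them directly, for example:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>(test-icefall) kuangfangjun:ASR$ head tdnn/exp/recogs-test_set.txt

(test-icefall) kuangfangjun:ASR$ head tdnn/exp/errs-test_set.txt
</pre></div>
</div>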
<p><strong>Congratulations!</strong> You have successfully set up the environment and have run the first recipe in <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a>.</p>
<p>Have fun with <code class="docutils literal notranslate"><span class="pre">icefall</span></code>!</p>
</section>
</section>
<section id="youtube-video">
<h2>YouTube Video<a class="headerlink" href="#youtube-video" title="Permalink to this heading"></a></h2>
<p>We provide the following YouTube video showing how to install <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a>.
It also shows how to debug various problems that you may encounter while
using <a class="reference external" href="https://github.com/k2-fsa/icefall">icefall</a>.</p>
<div class="admonition note">
<p class="admonition-title">Note</p>
<p>To get the latest news of <a class="reference external" href="https://github.com/k2-fsa">next-gen Kaldi</a>, please subscribe to
the following YouTube channel by <a class="reference external" href="https://www.youtube.com/channel/UC_VaumpkmINz1pNkFXAN9mw">Nadira Povey</a>:</p>
<blockquote>
<div><p><a class="reference external" href="https://www.youtube.com/channel/UC_VaumpkmINz1pNkFXAN9mw">https://www.youtube.com/channel/UC_VaumpkmINz1pNkFXAN9mw</a></p>
</div></blockquote>
</div>
<div class="video_wrapper" style="">
<iframe allowfullscreen="true" src="https://www.youtube.com/embed/LVmrBD0tLfE" style="border: 0; height: 345px; width: 560px">
</iframe></div></section>
</section>
</div>
</div>
<footer><div class="rst-footer-buttons" role="navigation" aria-label="Footer">
<a href="../index.html" class="btn btn-neutral float-left" title="Icefall" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left" aria-hidden="true"></span> Previous</a>
<a href="../docker/index.html" class="btn btn-neutral float-right" title="Docker" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right" aria-hidden="true"></span></a>
</div>
<hr/>
<div role="contentinfo">
<p>© Copyright 2021, icefall development team.</p>
</div>
Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a
<a href="https://github.com/readthedocs/sphinx_rtd_theme">theme</a>
provided by <a href="https://readthedocs.org">Read the Docs</a>.
</footer>
</div>
</div>
</section>
</div>
<script>
jQuery(function () {
SphinxRtdTheme.Navigation.enable(true);
});
</script>
</body>
</html> |