mirror of https://github.com/csukuangfj/kaldifeat.git
synced 2025-08-08 09:32:18 +00:00
support torch 2.3.1 (#100)
This commit is contained in:
parent 40cc0a4a2c
commit f36767ed1d
.github/workflows/build-doc.yml (vendored, 3 changes)
@@ -54,6 +54,7 @@ jobs:
           ls -lh $KALDIFEAT_DIR

           export GIT_LFS_SKIP_SMUDGE=1
+          export GIT_CLONE_PROTECTION_ACTIVE=false
           git clone https://huggingface.co/csukuangfj/kaldifeat huggingface
           cd huggingface

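A side note on the two environment variables in the hunk above: GIT_LFS_SKIP_SMUDGE=1 tells git-lfs not to download the large wheel files during the clone, and GIT_CLONE_PROTECTION_ACTIVE=false is a commonly used workaround for clone failures seen with recent git releases when cloning Hugging Face repositories. Below is a minimal sketch of the same clone driven from Python; it is not part of the workflow and only assumes that git is on PATH.

# Minimal sketch (not part of the workflow): reproduce the CI clone step from Python.
import os
import subprocess

env = dict(os.environ)
env["GIT_LFS_SKIP_SMUDGE"] = "1"              # do not download LFS blobs (the wheels) during the clone
env["GIT_CLONE_PROTECTION_ACTIVE"] = "false"  # workaround for clone failures with newer git versions

subprocess.run(
    ["git", "clone", "https://huggingface.co/csukuangfj/kaldifeat", "huggingface"],
    check=True,
    env=env,
)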
@@ -68,6 +69,8 @@ jobs:
           make html
           cp source/cpu.html build/html/
           cp source/cuda.html build/html/
+          cp source/cpu-cn.html build/html/
+          cp source/cuda-cn.html build/html/
           touch build/html/.nojekyll

       - name: Deploy
.gitignore (vendored, 2 changes)
@@ -6,3 +6,5 @@ __pycache__/
 test-1hour.wav
 path.sh
 torch_version.py
+cpu*.html
+cuda*.html
@@ -9,6 +9,11 @@ You can find pre-compiled wheels at
 We give a few examples below to show you how to install `kaldifeat`_ from
 pre-compiled wheels.
+
+.. hint::
+
+   The following lists only some examples. We suggest that you always select the
+   latest version of ``kaldifeat``.

 Linux (CPU)
 -----------

@@ -26,11 +31,19 @@ you can use one of the following methods:
   pip install torch==2.2.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
   pip install kaldifeat==1.25.4.dev20240210+cpu.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cpu.html

+  # For users from China
+  # If you cannot access huggingface, please use
+  # pip install kaldifeat==1.25.4.dev20240210+cpu.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cpu-cn.html
+
   # method 2
   pip install torch==2.2.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
   wget https://huggingface.co/csukuangfj/kaldifeat/resolve/main/ubuntu-cpu/kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
-  pip install ./kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+
+  # For users from China
+  # If you cannot access huggingface, please use
+  # wget https://hf-mirror.com/csukuangfj/kaldifeat/resolve/main/ubuntu-cpu/kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+
+  pip install ./kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

 Windows (CPU)
 --------------
@@ -49,9 +62,18 @@ you can use one of the following methods:
   pip install torch==2.2.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
   pip install kaldifeat==1.25.4.dev20240210+cpu.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cpu.html

+  # For users from China
+  # If you cannot access huggingface, please use
+  # pip install kaldifeat==1.25.4.dev20240210+cpu.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cpu-cn.html
+
   # method 2
   pip install torch==2.2.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
   wget https://huggingface.co/csukuangfj/kaldifeat/resolve/main/windows-cpu/kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-win_amd64.whl
+
+  # For users from China
+  # If you cannot access huggingface, please use
+  # wget https://hf-mirror.com/csukuangfj/kaldifeat/resolve/main/windows-cpu/kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-win_amd64.whl
+
   pip install ./kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-win_amd64.whl

 macOS (CPU)
@@ -71,9 +93,18 @@ you can use one of the following methods:
   pip install torch==2.2.0
   pip install kaldifeat==1.25.4.dev20240210+cpu.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cpu.html

+  # For users from China
+  # If you cannot access huggingface, please use
+  # pip install kaldifeat==1.25.4.dev20240210+cpu.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cpu-cn.html
+
   # method 2
   pip install torch==2.2.0 -f https://download.pytorch.org/whl/torch_stable.html
   wget https://huggingface.co/csukuangfj/kaldifeat/resolve/main/macos/kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-macosx_12_0_universal2.whl
+
+  # For users from China
+  # If you cannot access huggingface, please use
+  # wget https://hf-mirror.com/csukuangfj/kaldifeat/resolve/main/macos/kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-macosx_12_0_universal2.whl
+
   pip install ./kaldifeat-1.25.4.dev20240210+cpu.torch2.2.0-cp312-cp312-macosx_12_0_universal2.whl

 Linux (CUDA)
@@ -93,7 +124,16 @@ you can use one of the following methods:
   pip install torch==2.2.0+cu121 -f https://download.pytorch.org/whl/torch_stable.html
   pip install kaldifeat==1.25.4.dev20240210+cuda12.1.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cuda.html

+  # For users from China
+  # If you cannot access huggingface, please use
+  # pip install kaldifeat==1.25.4.dev20240210+cuda12.1.torch2.2.0 -f https://csukuangfj.github.io/kaldifeat/cuda-cn.html
+
   # method 2
   pip install torch==2.2.0+cu121 -f https://download.pytorch.org/whl/torch_stable.html
   wget https://huggingface.co/csukuangfj/kaldifeat/resolve/main/ubuntu-cuda/kaldifeat-1.25.4.dev20240210+cuda12.1.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+
+  # For users from China
+  # If you cannot access huggingface, please use
+  # wget https://hf-mirror.com/csukuangfj/kaldifeat/resolve/main/ubuntu-cuda/kaldifeat-1.25.4.dev20240210+cuda12.1.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+
   pip install ./kaldifeat-1.25.4.dev20240210+cuda12.1.torch2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
@@ -201,10 +201,16 @@ def generate_build_matrix(enable_cuda, for_windows, for_macos, test_only_latest_torch
             if not for_windows
             else ["11.8.0", "12.1.0"],
         },
+        "2.3.1": {
+            "python-version": ["3.8", "3.9", "3.10", "3.11", "3.12"],
+            "cuda": ["11.8", "12.1"]  # default 12.1
+            if not for_windows
+            else ["11.8.0", "12.1.0"],
+        },
         # https://github.com/Jimver/cuda-toolkit/blob/master/src/links/windows-links.ts
     }
     if test_only_latest_torch:
-        latest = "2.3.0"
+        latest = "2.3.1"
         matrix = {latest: matrix[latest]}

     if for_windows or for_macos: