Add PLP features.

This commit is contained in:
Fangjun Kuang 2021-07-18 19:46:14 +08:00
parent 3b53580271
commit 62c717e916
37 changed files with 1455 additions and 123 deletions

View File

@ -2,7 +2,9 @@
set(kaldifeat_srcs set(kaldifeat_srcs
feature-fbank.cc feature-fbank.cc
feature-functions.cc
feature-mfcc.cc feature-mfcc.cc
feature-plp.cc
feature-spectrogram.cc feature-spectrogram.cc
feature-window.cc feature-window.cc
matrix-functions.cc matrix-functions.cc

View File

@ -18,7 +18,7 @@ class OfflineFeatureTpl {
// Note: feature_window_function_ is the windowing function, which initialized // Note: feature_window_function_ is the windowing function, which initialized
// using the options class, that we cache at this level. // using the options class, that we cache at this level.
OfflineFeatureTpl(const Options &opts) explicit OfflineFeatureTpl(const Options &opts)
: computer_(opts), : computer_(opts),
feature_window_function_(computer_.GetFrameOptions(), opts.device) {} feature_window_function_(computer_.GetFrameOptions(), opts.device) {}

View File

@ -0,0 +1,33 @@
// kaldifeat/csrc/feature-functions.cc
//
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
// This file is copied/modified from kaldi/src/feat/feature-functions.cc
#include "kaldifeat/csrc/feature-functions.h"
#include <cmath>
namespace kaldifeat {
void InitIdftBases(int32_t n_bases, int32_t dimension, torch::Tensor *mat_out) {
float angle = M_PI / (dimension - 1);
float scale = 1.0f / (2 * (dimension - 1));
*mat_out = torch::empty({n_bases, dimension}, torch::kFloat);
float *data = mat_out->data_ptr<float>();
int32_t stride = mat_out->stride(0);
for (int32_t i = 0; i < n_bases; ++i) {
float *this_row = data + i * stride;
this_row[0] = scale;
for (int32_t j = 1; j < dimension - 1; ++j) {
this_row[j] = 2 * scale * std::cos(angle * i * j);
}
this_row[dimension - 1] = scale * std::cos(angle * i * (dimension - 1));
}
}
} // namespace kaldifeat

View File

@ -0,0 +1,18 @@
// kaldifeat/csrc/feature-functions.h
//
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
// This file is copied/modified from kaldi/src/feat/feature-functions.h
#ifndef KALDIFEAT_CSRC_FEATURE_FUNCTIONS_H_
#define KALDIFEAT_CSRC_FEATURE_FUNCTIONS_H_
#include "torch/torch.h"
namespace kaldifeat {
void InitIdftBases(int32_t n_bases, int32_t dimension, torch::Tensor *mat_out);
}
#endif // KALDIFEAT_CSRC_FEATURE_FUNCTIONS_H_

View File

@ -31,7 +31,7 @@ MfccComputer::MfccComputer(const MfccOptions &opts) : opts_(opts) {
// energy we replace this with the energy. This means a different // energy we replace this with the energy. This means a different
// ordering of features than HTK. // ordering of features than HTK.
using namespace torch::indexing; // It imports: Slice, None using namespace torch::indexing; // It imports: Slice, None // NOLINT
// dct_matrix[:opts.num_cepts, :] // dct_matrix[:opts.num_cepts, :]
torch::Tensor dct_rows = torch::Tensor dct_rows =

View File

@ -7,6 +7,9 @@
#ifndef KALDIFEAT_CSRC_FEATURE_MFCC_H_ #ifndef KALDIFEAT_CSRC_FEATURE_MFCC_H_
#define KALDIFEAT_CSRC_FEATURE_MFCC_H_ #define KALDIFEAT_CSRC_FEATURE_MFCC_H_
#include <map>
#include <string>
#include "kaldifeat/csrc/feature-common.h" #include "kaldifeat/csrc/feature-common.h"
#include "kaldifeat/csrc/feature-window.h" #include "kaldifeat/csrc/feature-window.h"
#include "kaldifeat/csrc/mel-computations.h" #include "kaldifeat/csrc/mel-computations.h"

View File

@ -0,0 +1,173 @@
// kaldifeat/csrc/feature-plp.cc
//
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
// This file is copied/modified from kaldi/src/feat/feature-plp.cc
#include "kaldifeat/csrc/feature-plp.h"
#include "kaldifeat/csrc/feature-functions.h"
#include "torch/torch.h"
namespace kaldifeat {
std::ostream &operator<<(std::ostream &os, const PlpOptions &opts) {
os << opts.ToString();
return os;
}
PlpComputer::PlpComputer(const PlpOptions &opts) : opts_(opts) {
// our num-ceps includes C0.
KALDIFEAT_ASSERT(opts_.num_ceps <= opts_.lpc_order + 1);
if (opts.cepstral_lifter != 0.0) {
lifter_coeffs_ = torch::empty({1, opts.num_ceps}, torch::kFloat32);
ComputeLifterCoeffs(opts.cepstral_lifter, &lifter_coeffs_);
lifter_coeffs_ = lifter_coeffs_.to(opts.device);
}
InitIdftBases(opts_.lpc_order + 1, opts_.mel_opts.num_bins + 2, &idft_bases_);
// CAUTION: we save a transposed version of idft_bases_
idft_bases_ = idft_bases_.to(opts.device).t();
if (opts.energy_floor > 0.0) log_energy_floor_ = logf(opts.energy_floor);
// We'll definitely need the filterbanks info for VTLN warping factor 1.0.
// [note: this call caches it.]
GetMelBanks(1.0);
}
PlpComputer::~PlpComputer() {
for (auto iter = mel_banks_.begin(); iter != mel_banks_.end(); ++iter)
delete iter->second;
for (auto iter = equal_loudness_.begin(); iter != equal_loudness_.end();
++iter)
delete iter->second;
}
const MelBanks *PlpComputer::GetMelBanks(float vtln_warp) {
MelBanks *this_mel_banks = nullptr;
// std::map<float, MelBanks *>::iterator iter = mel_banks_.find(vtln_warp);
auto iter = mel_banks_.find(vtln_warp);
if (iter == mel_banks_.end()) {
this_mel_banks =
new MelBanks(opts_.mel_opts, opts_.frame_opts, vtln_warp, opts_.device);
mel_banks_[vtln_warp] = this_mel_banks;
} else {
this_mel_banks = iter->second;
}
return this_mel_banks;
}
const torch::Tensor *PlpComputer::GetEqualLoudness(float vtln_warp) {
const MelBanks *this_mel_banks = GetMelBanks(vtln_warp);
torch::Tensor *ans = NULL;
auto iter = equal_loudness_.find(vtln_warp);
if (iter == equal_loudness_.end()) {
ans = new torch::Tensor;
GetEqualLoudnessVector(*this_mel_banks, ans);
*ans = ans->to(opts_.device);
equal_loudness_[vtln_warp] = ans;
} else {
ans = iter->second;
}
return ans;
}
// ans.shape [signal_frame.size(0), this->Dim()]
torch::Tensor PlpComputer::Compute(torch::Tensor signal_raw_log_energy,
float vtln_warp,
const torch::Tensor &signal_frame) {
KALDIFEAT_ASSERT(signal_frame.dim() == 2);
KALDIFEAT_ASSERT(signal_frame.size(1) == opts_.frame_opts.PaddedWindowSize());
const MelBanks &mel_banks = *GetMelBanks(vtln_warp);
const torch::Tensor &equal_loudness = *GetEqualLoudness(vtln_warp);
// torch.finfo(torch.float32).eps
constexpr float kEps = 1.1920928955078125e-07f;
// Compute energy after window function (not the raw one).
if (opts_.use_energy && !opts_.raw_energy) {
signal_raw_log_energy =
torch::clamp_min(signal_frame.pow(2).sum(1), kEps).log();
}
// note spectrum is in magnitude, not power, because of `abs()`
torch::Tensor spectrum = torch::fft::rfft(signal_frame).abs();
// remove the last column, i.e., the highest fft bin
spectrum = spectrum.index(
{"...", torch::indexing::Slice(0, -1, torch::indexing::None)});
// Use power instead of magnitude
spectrum = spectrum.pow(2);
torch::Tensor mel_energies = mel_banks.Compute(spectrum);
mel_energies = torch::mul(mel_energies, equal_loudness);
mel_energies = mel_energies.pow(opts_.compress_factor);
// duplicate first and last elements
//
// left_padding = wave[:num_left_padding].flip(dims=(0,))
// first = mel_energies[:, 0]
// first.shape [num_frames, 1]
torch::Tensor first = mel_energies.index({"...", 0}).unsqueeze(-1);
// last = mel_energies[:, -1]
// last.shape [num_frames, 1]
torch::Tensor last = mel_energies.index({"...", -1}).unsqueeze(-1);
mel_energies = torch::cat({first, mel_energies, last}, 1);
torch::Tensor autocorr_coeffs = torch::mm(mel_energies, idft_bases_);
torch::Tensor lpc_coeffs;
torch::Tensor residual_log_energy = ComputeLpc(autocorr_coeffs, &lpc_coeffs);
residual_log_energy = torch::clamp_min(residual_log_energy, kEps);
torch::Tensor raw_cepstrum = Lpc2Cepstrum(lpc_coeffs);
// torch.cat((residual_log_energy.unsqueeze(-1),
// raw_cepstrum[:opts.num_ceps-1]), 1)
//
using namespace torch::indexing; // It imports: Slice, None // NOLINT
torch::Tensor features = torch::cat(
{residual_log_energy.unsqueeze(-1),
raw_cepstrum.index({"...", Slice(0, opts_.num_ceps - 1, None)})},
1);
if (opts_.cepstral_lifter != 0.0) {
features = torch::mul(features, lifter_coeffs_);
}
if (opts_.cepstral_scale != 1.0) {
features = features * opts_.cepstral_scale;
}
if (opts_.use_energy) {
if (opts_.energy_floor > 0.0f) {
signal_raw_log_energy =
torch::clamp_min(signal_raw_log_energy, log_energy_floor_);
}
// column 0 is replaced by signal_raw_log_energy
//
// features[:, 0] = signal_raw_log_energy
//
features.index({"...", 0}) = signal_raw_log_energy;
}
if (opts_.htk_compat) { // reorder the features.
// shift left, so the original 0th column
// becomes the last column;
// the original first column becomes the 0th column
features = torch::roll(features, -1, 1);
}
return features;
}
} // namespace kaldifeat

View File

@ -0,0 +1,131 @@
// kaldifeat/csrc/feature-plp.h
//
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
// This file is copied/modified from kaldi/src/feat/feature-plp.h
#ifndef KALDIFEAT_CSRC_FEATURE_PLP_H_
#define KALDIFEAT_CSRC_FEATURE_PLP_H_
#include <map>
#include <string>
#include "kaldifeat/csrc/feature-common.h"
#include "kaldifeat/csrc/feature-window.h"
#include "kaldifeat/csrc/mel-computations.h"
#include "torch/torch.h"
namespace kaldifeat {
/// PlpOptions contains basic options for computing PLP features.
/// It only includes things that can be done in a "stateless" way, i.e.
/// it does not include energy max-normalization.
/// It does not include delta computation.
struct PlpOptions {
FrameExtractionOptions frame_opts;
MelBanksOptions mel_opts;
// Order of LPC analysis in PLP computation
//
// 12 seems to be common for 16kHz-sampled data. For 8kHz-sampled
// data, 15 may be better.
int32_t lpc_order = 12;
// Number of cepstra in PLP computation (including C0)
int32_t num_ceps = 13;
bool use_energy = true; // use energy; else C0
// Floor on energy (absolute, not relative) in PLP computation.
// Only makes a difference if --use-energy=true; only necessary if
// dither is 0.0. Suggested values: 0.1 or 1.0
float energy_floor = 0.0;
// If true, compute energy before preemphasis and windowing
bool raw_energy = true;
// Compression factor in PLP computation
float compress_factor = 0.33333;
// Constant that controls scaling of PLPs
int32_t cepstral_lifter = 22;
// Scaling constant in PLP computation
float cepstral_scale = 1.0;
bool htk_compat = false; // if true, put energy/C0 last and introduce a
// factor of sqrt(2) on C0 to be the same as HTK.
//
torch::Device device{"cpu"};
PlpOptions() { mel_opts.num_bins = 23; }
std::string ToString() const {
std::ostringstream os;
os << "frame_opts: \n";
os << frame_opts << "\n";
os << "\n";
os << "mel_opts: \n";
os << mel_opts << "\n";
os << "lpc_order: " << lpc_order << "\n";
os << "num_ceps: " << num_ceps << "\n";
os << "use_energy: " << use_energy << "\n";
os << "energy_floor: " << energy_floor << "\n";
os << "raw_energy: " << raw_energy << "\n";
os << "compress_factor: " << compress_factor << "\n";
os << "cepstral_lifter: " << cepstral_lifter << "\n";
os << "cepstral_scale: " << cepstral_scale << "\n";
os << "htk_compat: " << htk_compat << "\n";
os << "device: " << device << "\n";
return os.str();
}
};
std::ostream &operator<<(std::ostream &os, const PlpOptions &opts);
class PlpComputer {
public:
using Options = PlpOptions;
explicit PlpComputer(const PlpOptions &opts);
~PlpComputer();
PlpComputer &operator=(const PlpComputer &) = delete;
PlpComputer(const PlpComputer &) = delete;
int32_t Dim() const { return opts_.num_ceps; }
bool NeedRawLogEnergy() const { return opts_.use_energy && opts_.raw_energy; }
const FrameExtractionOptions &GetFrameOptions() const {
return opts_.frame_opts;
}
const PlpOptions &GetOptions() const { return opts_; }
// signal_raw_log_energy is log_energy_pre_window, which is not empty
// iff NeedRawLogEnergy() returns true.
torch::Tensor Compute(torch::Tensor signal_raw_log_energy, float vtln_warp,
const torch::Tensor &signal_frame);
private:
const MelBanks *GetMelBanks(float vtln_warp);
const torch::Tensor *GetEqualLoudness(float vtln_warp);
PlpOptions opts_;
torch::Tensor lifter_coeffs_;
torch::Tensor idft_bases_; // 2-D tensor, kFloat. Caution: it is transposed
float log_energy_floor_;
std::map<float, MelBanks *> mel_banks_; // float is VTLN coefficient.
// value is a 1-D torch.Tensor
std::map<float, torch::Tensor *> equal_loudness_;
};
using Plp = OfflineFeatureTpl<PlpComputer>;
} // namespace kaldifeat
#endif // KALDIFEAT_CSRC_FEATURE_PLP_H_

View File

@ -7,6 +7,7 @@
#include "kaldifeat/csrc/feature-window.h" #include "kaldifeat/csrc/feature-window.h"
#include <cmath> #include <cmath>
#include <vector>
#include "torch/torch.h" #include "torch/torch.h"
@ -168,7 +169,7 @@ torch::Tensor Dither(const torch::Tensor &wave, float dither_value) {
} }
torch::Tensor Preemphasize(float preemph_coeff, const torch::Tensor &wave) { torch::Tensor Preemphasize(float preemph_coeff, const torch::Tensor &wave) {
using namespace torch::indexing; // It imports: Slice, None using namespace torch::indexing; // It imports: Slice, None // NOLINT
if (preemph_coeff == 0.0f) return wave; if (preemph_coeff == 0.0f) return wave;
KALDIFEAT_ASSERT(preemph_coeff >= 0.0f && preemph_coeff <= 1.0f); KALDIFEAT_ASSERT(preemph_coeff >= 0.0f && preemph_coeff <= 1.0f);

View File

@ -4,6 +4,8 @@
// This file is copied/modified from kaldi/src/feat/feature-window.h // This file is copied/modified from kaldi/src/feat/feature-window.h
#include <string>
#include "kaldifeat/csrc/log.h" #include "kaldifeat/csrc/log.h"
#include "torch/torch.h" #include "torch/torch.h"

View File

@ -3,9 +3,11 @@
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang) // Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
// //
// This file is copied/modified from kaldi/src/feat/mel-computations.cc // This file is copied/modified from kaldi/src/feat/mel-computations.cc
//
#include "kaldifeat/csrc/mel-computations.h" #include "kaldifeat/csrc/mel-computations.h"
#include <algorithm>
#include "kaldifeat/csrc/feature-window.h" #include "kaldifeat/csrc/feature-window.h"
namespace kaldifeat { namespace kaldifeat {
@ -136,9 +138,14 @@ MelBanks::MelBanks(const MelBanksOptions &opts,
<< " and vtln-high " << vtln_high << ", versus " << " and vtln-high " << vtln_high << ", versus "
<< "low-freq " << low_freq << " and high-freq " << high_freq; << "low-freq " << low_freq << " and high-freq " << high_freq;
// we will transpose bins_mat_ at the end of this funciton
bins_mat_ = torch::zeros({num_bins, num_fft_bins}, torch::kFloat); bins_mat_ = torch::zeros({num_bins, num_fft_bins}, torch::kFloat);
int32_t stride = bins_mat_.strides()[0]; int32_t stride = bins_mat_.strides()[0];
center_freqs_ = torch::empty({num_bins}, torch::kFloat);
float *center_freqs_data = center_freqs_.data_ptr<float>();
for (int32_t bin = 0; bin < num_bins; ++bin) { for (int32_t bin = 0; bin < num_bins; ++bin) {
float left_mel = mel_low_freq + bin * mel_freq_delta, float left_mel = mel_low_freq + bin * mel_freq_delta,
center_mel = mel_low_freq + (bin + 1) * mel_freq_delta, center_mel = mel_low_freq + (bin + 1) * mel_freq_delta,
@ -152,6 +159,7 @@ MelBanks::MelBanks(const MelBanksOptions &opts,
right_mel = VtlnWarpMelFreq(vtln_low, vtln_high, low_freq, high_freq, right_mel = VtlnWarpMelFreq(vtln_low, vtln_high, low_freq, high_freq,
vtln_warp_factor, right_mel); vtln_warp_factor, right_mel);
} }
center_freqs_data[bin] = InverseMelScale(center_mel);
// this_bin will be a vector of coefficients that is only // this_bin will be a vector of coefficients that is only
// nonzero where this mel bin is active. // nonzero where this mel bin is active.
float *this_bin = bins_mat_.data_ptr<float>() + bin * stride; float *this_bin = bins_mat_.data_ptr<float>() + bin * stride;
@ -203,4 +211,141 @@ void ComputeLifterCoeffs(float Q, torch::Tensor *coeffs) {
} }
} }
void GetEqualLoudnessVector(const MelBanks &mel_banks, torch::Tensor *ans) {
int32_t n = mel_banks.NumBins();
// Central frequency of each mel bin.
const torch::Tensor &f0 = mel_banks.GetCenterFreqs();
const float *f0_data = f0.data_ptr<float>();
*ans = torch::empty({1, n}, torch::kFloat);
float *ans_data = ans->data_ptr<float>();
for (int32_t i = 0; i < n; ++i) {
float fsq = f0_data[i] * f0_data[i];
float fsub = fsq / (fsq + 1.6e5);
ans_data[i] = fsub * fsub * ((fsq + 1.44e6) / (fsq + 9.61e6));
}
}
// Durbin's recursion - converts autocorrelation coefficients to the LPC
// pTmp - temporal place [n]
// pAC - autocorrelation coefficients [n + 1]
// pLP - linear prediction coefficients [n]
// (predicted_sn = sum_1^P{a[i-1] * s[n-i]}})
// F(z) = 1 / (1 - A(z)), 1 is not stored in the denominator
static float Durbin(int n, const float *pAC, float *pLP, float *pTmp) {
float ki; // reflection coefficient
int i;
int j;
float E = pAC[0];
for (i = 0; i < n; ++i) {
// next reflection coefficient
ki = pAC[i + 1];
for (j = 0; j < i; ++j) ki += pLP[j] * pAC[i - j];
ki = ki / E;
// new error
float c = 1 - ki * ki;
if (c < 1.0e-5) // remove NaNs for constant signal
c = 1.0e-5;
E *= c;
// new LP coefficients
pTmp[i] = -ki;
for (j = 0; j < i; ++j) pTmp[j] = pLP[j] - ki * pLP[i - j - 1];
for (j = 0; j <= i; ++j) pLP[j] = pTmp[j];
}
return E;
}
// Compute LP coefficients from autocorrelation coefficients.
torch::Tensor ComputeLpc(const torch::Tensor &autocorr_in,
torch::Tensor *lpc_out) {
KALDIFEAT_ASSERT(autocorr_in.dim() == 2);
int32_t num_frames = autocorr_in.size(0);
int32_t lpc_order = autocorr_in.size(1) - 1;
*lpc_out = torch::empty({num_frames, lpc_order}, torch::kFloat);
torch::Tensor ans = torch::empty({num_frames}, torch::kFloat);
// TODO(fangjun): Durbin runs only on CPU. Implement a CUDA version
torch::Device saved_device = autocorr_in.device();
torch::Device cpu("cpu");
torch::Tensor in_cpu = autocorr_in.to(cpu);
torch::Tensor tmp = torch::empty_like(*lpc_out);
int32_t in_stride = in_cpu.stride(0);
int32_t ans_stride = ans.stride(0);
int32_t tmp_stride = tmp.stride(0);
int32_t lpc_stride = lpc_out->stride(0);
const float *in_data = in_cpu.data_ptr<float>();
float *ans_data = ans.data_ptr<float>();
float *tmp_data = tmp.data_ptr<float>();
float *lpc_data = lpc_out->data_ptr<float>();
// see
// https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/Parallel.h#L58
at::parallel_for(0, num_frames, 1, [&](int32_t begin, int32_t end) -> void {
for (int32_t i = begin; i != end; ++i) {
float ret = Durbin(lpc_order, in_data + i * in_stride,
lpc_data + i * lpc_stride, tmp_data + i * tmp_stride);
if (ret <= 0.0) KALDIFEAT_WARN << "Zero energy in LPC computation";
ans_data[i] = -logf(1.0 / ret); // forms the C0 value
}
});
*lpc_out = lpc_out->to(saved_device);
return ans.to(saved_device);
}
static void Lpc2CepstrumInternal(int n, const float *pLPC, float *pCepst) {
for (int32_t i = 0; i < n; ++i) {
double sum = 0.0;
for (int32_t j = 0; j < i; ++j) {
sum += (i - j) * pLPC[j] * pCepst[i - j - 1];
}
pCepst[i] = -pLPC[i] - sum / (i + 1);
}
}
torch::Tensor Lpc2Cepstrum(const torch::Tensor &lpc) {
KALDIFEAT_ASSERT(lpc.dim() == 2);
torch::Device cpu("cpu");
torch::Device saved_device = lpc.device();
// TODO(fangjun): support cuda
torch::Tensor in_cpu = lpc.to(cpu);
int32_t num_frames = in_cpu.size(0);
int32_t lpc_order = in_cpu.size(1);
const float *in_data = in_cpu.data_ptr<float>();
int32_t in_stride = in_cpu.stride(0);
torch::Tensor ans = torch::zeros({num_frames, lpc_order}, torch::kFloat);
int32_t ans_stride = ans.stride(0);
float *ans_data = ans.data_ptr<float>();
at::parallel_for(0, num_frames, 1, [&](int32_t begin, int32_t end) -> void {
for (int32_t i = begin; i != end; ++i) {
Lpc2CepstrumInternal(lpc_order, in_data + i * in_stride,
ans_data + i * ans_stride);
}
});
return ans.to(saved_device);
}
} // namespace kaldifeat } // namespace kaldifeat

View File

@ -5,6 +5,7 @@
// This file is copied/modified from kaldi/src/feat/mel-computations.h // This file is copied/modified from kaldi/src/feat/mel-computations.h
#include <cmath> #include <cmath>
#include <string>
#include "kaldifeat/csrc/feature-window.h" #include "kaldifeat/csrc/feature-window.h"
@ -74,7 +75,11 @@ class MelBanks {
const FrameExtractionOptions &frame_opts, float vtln_warp_factor, const FrameExtractionOptions &frame_opts, float vtln_warp_factor,
torch::Device device); torch::Device device);
int32_t NumBins() const { return static_cast<int32_t>(bins_mat_.size(0)); } // CAUTION: we save a transposed version of bins_mat_, so return size(1) here
int32_t NumBins() const { return static_cast<int32_t>(bins_mat_.size(1)); }
// returns vector of central freq of each bin; needed by plp code.
const torch::Tensor &GetCenterFreqs() const { return center_freqs_; }
torch::Tensor Compute(const torch::Tensor &spectrum) const; torch::Tensor Compute(const torch::Tensor &spectrum) const;
@ -82,9 +87,14 @@ class MelBanks {
const torch::Tensor &GetBinsMat() const { return bins_mat_; } const torch::Tensor &GetBinsMat() const { return bins_mat_; }
private: private:
// A 2-D matrix of shape [num_bins, num_fft_bins] // A 2-D matrix. Its shape is NOT [num_bins, num_fft_bins]
// Its shape is [num_fft_bins, num_bins].
torch::Tensor bins_mat_; torch::Tensor bins_mat_;
// center frequencies of bins, numbered from 0 ... num_bins-1.
// Needed by GetCenterFreqs().
torch::Tensor center_freqs_; // It's always on CPU
bool debug_; bool debug_;
bool htk_mode_; bool htk_mode_;
}; };
@ -96,6 +106,26 @@ class MelBanks {
// coeffs is a 1-D float tensor // coeffs is a 1-D float tensor
void ComputeLifterCoeffs(float Q, torch::Tensor *coeffs); void ComputeLifterCoeffs(float Q, torch::Tensor *coeffs);
void GetEqualLoudnessVector(const MelBanks &mel_banks, torch::Tensor *ans);
/* Compute LP coefficients from autocorrelation coefficients.
*
* @param [in] autocorr_in A 2-D tensor. Each row is a frame. Its number of
* columns is lpc_order + 1
* @param [out] lpc_coeffs A 2-D tensor. On return, it has as many rows as the
* input tensor. Its number of columns is lpc_order.
*
* @return Returns log energy of residual in a 1-D tensor. It has as many
* elements as the number of rows in `autocorr_in`.
*/
torch::Tensor ComputeLpc(const torch::Tensor &autocorr_in,
torch::Tensor *lpc_coeffs);
/*
* @param [in] lpc It is the output argument `lpc_coeffs` in ComputeLpc().
*/
torch::Tensor Lpc2Cepstrum(const torch::Tensor &lpc);
} // namespace kaldifeat } // namespace kaldifeat
#endif // KALDIFEAT_CSRC_MEL_COMPUTATIONS_H_ #endif // KALDIFEAT_CSRC_MEL_COMPUTATIONS_H_

View File

@ -4,8 +4,21 @@
// This file is copied/modified from kaldi/src/feat/pitch-functions.h // This file is copied/modified from kaldi/src/feat/pitch-functions.h
#ifndef KALDIFEAT_CSRC_FEATURE_PITCH_FUNCTIONS_H_ #ifndef KALDIFEAT_CSRC_PITCH_FUNCTIONS_H_
#define KALDIFEAT_CSRC_FEATURE_PITCH_FUNCTIONS_H_ #define KALDIFEAT_CSRC_PITCH_FUNCTIONS_H_
// References
//
// Talkin, David, and W. Bastiaan Kleijn. "A robust algorithm for pitch
// tracking (RAPT)." coding and synthesis 495 (1995): 518.
// (https://www.ee.columbia.edu/~dpwe/papers/Talkin95-rapt.pdf)
//
// Ghahremani, Pegah, et al. "A pitch extraction algorithm tuned for
// automatic speech recognition." 2014 IEEE international conference on
// acoustics, speech and signal processing (ICASSP). IEEE, 2014.
// (http://danielpovey.com/files/2014_icassp_pitch.pdf)
#include <string>
#include "torch/torch.h" #include "torch/torch.h"
@ -134,6 +147,8 @@ struct PitchExtractionOptions {
} }
}; };
// TODO(fangjun): Implement it
} // namespace kaldifeat } // namespace kaldifeat
#endif // KALDIFEAT_CSRC_FEATURE_PITCH_FUNCTIONS_H_ #endif // KALDIFEAT_CSRC_PITCH_FUNCTIONS_H_

View File

@ -2,6 +2,7 @@ add_definitions(-DTORCH_API_INCLUDE_EXTENSION_H)
pybind11_add_module(_kaldifeat pybind11_add_module(_kaldifeat
feature-fbank.cc feature-fbank.cc
feature-mfcc.cc feature-mfcc.cc
feature-plp.cc
feature-spectrogram.cc feature-spectrogram.cc
feature-window.cc feature-window.cc
kaldifeat.cc kaldifeat.cc

View File

@ -0,0 +1 @@
filter=-runtime/references

View File

@ -4,6 +4,8 @@
#include "kaldifeat/python/csrc/feature-mfcc.h" #include "kaldifeat/python/csrc/feature-mfcc.h"
#include <string>
#include "kaldifeat/csrc/feature-mfcc.h" #include "kaldifeat/csrc/feature-mfcc.h"
namespace kaldifeat { namespace kaldifeat {

View File

@ -0,0 +1,57 @@
// kaldifeat/python/csrc/feature-plp.cc
//
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
#include "kaldifeat/python/csrc/feature-plp.h"
#include <string>
#include "kaldifeat/csrc/feature-plp.h"
namespace kaldifeat {
void PybindPlpOptions(py::module &m) {
using PyClass = PlpOptions;
py::class_<PyClass>(m, "PlpOptions")
.def(py::init<>())
.def_readwrite("frame_opts", &PyClass::frame_opts)
.def_readwrite("mel_opts", &PyClass::mel_opts)
.def_readwrite("lpc_order", &PyClass::lpc_order)
.def_readwrite("num_ceps", &PyClass::num_ceps)
.def_readwrite("use_energy", &PyClass::use_energy)
.def_readwrite("energy_floor", &PyClass::energy_floor)
.def_readwrite("raw_energy", &PyClass::raw_energy)
.def_readwrite("compress_factor", &PyClass::compress_factor)
.def_readwrite("cepstral_lifter", &PyClass::cepstral_lifter)
.def_readwrite("cepstral_scale", &PyClass::cepstral_scale)
.def_readwrite("htk_compat", &PyClass::htk_compat)
.def_property(
"device",
[](const PyClass &self) -> py::object {
py::object ans = py::module_::import("torch").attr("device");
return ans(self.device.str());
},
[](PyClass &self, py::object obj) -> void {
std::string s = static_cast<py::str>(obj);
self.device = torch::Device(s);
})
.def("__str__",
[](const PyClass &self) -> std::string { return self.ToString(); });
}
static void PybindPlp(py::module &m) {
using PyClass = Plp;
py::class_<PyClass>(m, "Plp")
.def(py::init<const PlpOptions &>(), py::arg("opts"))
.def("dim", &PyClass::Dim)
.def_property_readonly("options", &PyClass::GetOptions)
.def("compute_features", &PyClass::ComputeFeatures, py::arg("wave"),
py::arg("vtln_warp"));
}
void PybindFeaturePlp(py::module &m) {
PybindPlpOptions(m);
PybindPlp(m);
}
} // namespace kaldifeat

View File

@ -0,0 +1,16 @@
// kaldifeat/python/csrc/feature-plp.h
//
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
#ifndef KALDIFEAT_PYTHON_CSRC_FEATURE_PLP_H_
#define KALDIFEAT_PYTHON_CSRC_FEATURE_PLP_H_
#include "kaldifeat/python/csrc/kaldifeat.h"
namespace kaldifeat {
void PybindFeaturePlp(py::module &m);
} // namespace kaldifeat
#endif // KALDIFEAT_PYTHON_CSRC_FEATURE_PLP_H_

View File

@ -4,6 +4,8 @@
#include "kaldifeat/python/csrc/feature-spectrogram.h" #include "kaldifeat/python/csrc/feature-spectrogram.h"
#include <string>
#include "kaldifeat/csrc/feature-spectrogram.h" #include "kaldifeat/csrc/feature-spectrogram.h"
namespace kaldifeat { namespace kaldifeat {

View File

@ -4,6 +4,8 @@
#include "kaldifeat/python/csrc/feature-window.h" #include "kaldifeat/python/csrc/feature-window.h"
#include <string>
#include "kaldifeat/csrc/feature-window.h" #include "kaldifeat/csrc/feature-window.h"
namespace kaldifeat { namespace kaldifeat {

View File

@ -7,6 +7,7 @@
#include "kaldifeat/csrc/feature-fbank.h" #include "kaldifeat/csrc/feature-fbank.h"
#include "kaldifeat/python/csrc/feature-fbank.h" #include "kaldifeat/python/csrc/feature-fbank.h"
#include "kaldifeat/python/csrc/feature-mfcc.h" #include "kaldifeat/python/csrc/feature-mfcc.h"
#include "kaldifeat/python/csrc/feature-plp.h"
#include "kaldifeat/python/csrc/feature-spectrogram.h" #include "kaldifeat/python/csrc/feature-spectrogram.h"
#include "kaldifeat/python/csrc/feature-window.h" #include "kaldifeat/python/csrc/feature-window.h"
#include "kaldifeat/python/csrc/mel-computations.h" #include "kaldifeat/python/csrc/mel-computations.h"
@ -21,6 +22,7 @@ PYBIND11_MODULE(_kaldifeat, m) {
PybindMelComputations(m); PybindMelComputations(m);
PybindFeatureFbank(m); PybindFeatureFbank(m);
PybindFeatureMfcc(m); PybindFeatureMfcc(m);
PybindFeaturePlp(m);
PybindFeatureSpectrogram(m); PybindFeatureSpectrogram(m);
} }

View File

@ -2,9 +2,11 @@
// //
// Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang) // Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
#include "kaldifeat/csrc/mel-computations.h" #include "kaldifeat/python/csrc/mel-computations.h"
#include "kaldifeat/python/csrc/feature-window.h" #include <string>
#include "kaldifeat/csrc/mel-computations.h"
namespace kaldifeat { namespace kaldifeat {
@ -21,7 +23,6 @@ static void PybindMelBanksOptions(py::module &m) {
.def_readwrite("htk_mode", &PyClass::htk_mode) .def_readwrite("htk_mode", &PyClass::htk_mode)
.def("__str__", .def("__str__",
[](const PyClass &self) -> std::string { return self.ToString(); }); [](const PyClass &self) -> std::string { return self.ToString(); });
;
} }
void PybindMelComputations(py::module &m) { PybindMelBanksOptions(m); } void PybindMelComputations(py::module &m) { PybindMelBanksOptions(m); }

View File

@ -4,9 +4,11 @@ from _kaldifeat import (
FrameExtractionOptions, FrameExtractionOptions,
MelBanksOptions, MelBanksOptions,
MfccOptions, MfccOptions,
PlpOptions,
SpectrogramOptions, SpectrogramOptions,
) )
from .fbank import Fbank from .fbank import Fbank
from .mfcc import Mfcc from .mfcc import Mfcc
from .plp import Plp
from .spectrogram import Spectrogram from .spectrogram import Spectrogram

View File

@ -0,0 +1,12 @@
# Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
import _kaldifeat
from .offline_feature import OfflineFeature
class Plp(OfflineFeature):
def __init__(self, opts: _kaldifeat.PlpOptions):
super().__init__(opts)
self.computer = _kaldifeat.Plp(opts)

View File

@ -33,6 +33,18 @@ if [ ! -f test-spectrogram.txt ]; then
compute-spectrogram-feats --dither=0 scp:test.scp ark,t:test-spectrogram.txt compute-spectrogram-feats --dither=0 scp:test.scp ark,t:test-spectrogram.txt
fi fi
if [ ! -f test-plp.txt ]; then
compute-plp-feats --dither=0 scp:test.scp ark,t:test-plp.txt
fi
if [ ! -f test-plp-no-snip-edges.txt ]; then
compute-plp-feats --dither=0 --snip-edges=0 scp:test.scp ark,t:test-plp-no-snip-edges.txt
fi
if [ ! -f test-plp-htk-10-cpes.txt ]; then
compute-plp-feats --dither=0 --htk-compat=1 --num-ceps=10 scp:test.scp ark,t:test-plp-htk-10-ceps.txt
fi
if [ ! -f test-spectrogram-no-snip-edges.txt ]; then if [ ! -f test-spectrogram-no-snip-edges.txt ]; then
compute-spectrogram-feats --dither=0 --snip-edges=0 scp:test.scp ark,t:test-spectrogram-no-snip-edges.txt compute-spectrogram-feats --dither=0 --snip-edges=0 scp:test.scp ark,t:test-spectrogram-no-snip-edges.txt
fi fi

View File

@ -0,0 +1,119 @@
1 [
1.413609 1.290824 0.4644783 -0.9542531 -2.107659 -3.068145 -2.939797 -2.310168 -0.8795097 25.38532
1.390788 1.282347 0.2231341 -0.9314669 -2.371381 -3.059567 -3.144366 -2.177221 -0.9286566 25.40668
1.388577 1.236941 0.1314043 -0.9991927 -2.458853 -3.084329 -3.129788 -2.174042 -0.9450245 25.39494
1.375442 1.173042 0.1502425 -1.142179 -2.355545 -3.236665 -3.001543 -2.267133 -0.87585 25.38965
1.393851 1.104497 0.141741 -1.257701 -2.464931 -3.169199 -3.066706 -2.124195 -0.9064269 25.40345
1.402755 1.074741 -0.02913864 -1.290972 -2.777904 -3.207083 -3.022014 -2.133607 -0.6296256 25.39414
1.407352 1.008147 -0.1131425 -1.579594 -2.861438 -3.378169 -3.038812 -2.003954 -0.4230602 25.41012
1.374495 0.9902636 -0.2996644 -1.702737 -3.079249 -3.487383 -3.102599 -1.778829 -0.3840032 25.40563
1.355937 0.9989512 -0.3545118 -1.856242 -3.178149 -3.578775 -3.116787 -1.698683 -0.09036512 25.39347
1.391236 0.9581302 -0.3472444 -1.861408 -3.362813 -3.617713 -3.082451 -1.610092 0.08077221 25.40889
1.399433 0.9346108 -0.3959779 -1.987612 -3.453102 -3.663212 -3.103057 -1.460295 0.1775268 25.38499
1.462968 0.8531632 -0.3624526 -2.20417 -3.405833 -3.721663 -2.937944 -1.370359 0.3188259 25.38442
1.403882 0.8126143 -0.5479507 -2.295699 -3.53874 -3.76027 -2.86957 -1.315046 0.4731908 25.38152
1.382054 0.7026194 -0.7345445 -2.412446 -3.638927 -3.798969 -2.857107 -1.190031 0.6110605 25.39383
1.372825 0.6358751 -0.7637866 -2.48975 -3.70132 -3.739165 -2.769514 -1.092504 0.732299 25.40385
1.323208 0.6830922 -0.8962593 -2.613356 -3.574561 -3.849434 -2.527087 -0.9416634 0.7969939 25.39873
1.289285 0.5301567 -1.04017 -2.772953 -3.790753 -3.773839 -2.450881 -0.7284988 0.9865087 25.40309
1.30077 0.4414823 -1.118887 -2.906891 -3.884625 -3.672447 -2.228457 -0.4646592 1.120844 25.38262
1.137272 0.3376181 -1.360427 -3.016979 -4.219143 -3.481745 -2.279193 -0.2806494 1.509762 25.39624
1.137126 0.3023909 -1.455994 -3.012171 -4.178331 -3.530272 -2.166651 -0.05700313 1.494152 25.39575
1.186537 0.2802466 -1.418199 -3.050323 -4.108273 -3.501019 -1.981316 -0.01219817 1.675895 25.3924
1.188756 0.2457795 -1.59098 -3.264617 -4.071124 -3.437665 -1.710851 0.1895033 1.732037 25.38689
1.152641 -0.02832622 -1.823405 -3.629564 -4.216199 -3.330145 -1.549857 0.4657693 1.887605 25.39572
1.19065 -0.02861873 -1.970937 -3.662004 -4.297549 -3.13729 -1.28938 0.788473 1.865383 25.38815
1.206228 0.04685518 -2.088806 -3.672893 -4.302299 -3.006203 -0.8956488 0.8469776 2.13319 25.39475
1.268487 -0.08625151 -2.027763 -4.007592 -4.160067 -2.932582 -0.6698269 1.087226 2.081321 25.39246
1.117322 -0.1926424 -2.340369 -4.156783 -4.329001 -2.778152 -0.5231138 1.256799 2.214584 25.39709
1.168648 -0.1992409 -2.266985 -4.138491 -4.265579 -2.6857 -0.2047093 1.525033 2.305256 25.40476
1.119295 -0.3028975 -2.347377 -4.182744 -4.260194 -2.579925 -0.007774514 1.832262 2.301147 25.39227
1.027738 -0.5003986 -2.539946 -4.333557 -4.205943 -2.402873 0.2258443 1.948647 2.444017 25.40465
0.9784032 -0.6186817 -2.745365 -4.338122 -4.166886 -2.15028 0.4480724 2.157808 2.513159 25.39269
1.008477 -0.5290523 -2.715651 -4.186758 -3.905894 -1.861171 0.7038742 2.375283 2.540268 25.40318
0.9682827 -0.7742785 -2.873694 -4.494859 -3.81933 -1.794264 0.7336959 2.365326 2.555315 25.39934
0.978914 -0.8186723 -3.042555 -4.633791 -3.754411 -1.470605 0.9454756 2.430828 2.3588 25.39792
0.975665 -0.8669639 -3.217564 -4.63007 -3.733683 -1.222846 1.471363 2.496127 2.132827 25.38512
0.9867552 -0.8420247 -3.293777 -4.594607 -3.644038 -0.809936 1.800793 2.631897 1.939019 25.39313
1.008512 -0.9055113 -3.373589 -4.604676 -3.561381 -0.5277195 2.029937 2.721858 1.547688 25.39145
0.9850418 -1.059142 -3.405119 -4.767247 -3.34202 -0.4476502 2.31317 2.702466 1.502576 25.40082
1.000611 -1.034842 -3.378496 -4.555243 -3.067476 -0.0262686 2.55475 2.789144 1.53972 25.39538
0.8871047 -1.210229 -3.605299 -4.65952 -2.833532 0.26362 2.434236 2.892101 1.363947 25.39835
0.8388072 -1.253652 -3.682436 -4.523992 -2.485977 0.4900617 2.650782 2.940339 1.217679 25.3992
0.8448223 -1.344651 -3.540604 -4.343541 -2.197538 0.9870064 2.970114 2.967928 0.9476861 25.39301
0.7774073 -1.435785 -3.716074 -4.306447 -2.197223 1.195093 3.042425 2.825031 0.6986012 25.38788
0.7105902 -1.508647 -3.859203 -4.278859 -2.145113 1.274077 3.173957 2.63133 0.6662624 25.39198
0.7593616 -1.521935 -3.733778 -4.092356 -1.686811 1.653421 3.432562 2.614487 0.5256236 25.39025
0.7109561 -1.708717 -4.101766 -4.407348 -1.528757 1.653018 3.380414 2.138874 0.01044318 25.38977
0.7482379 -1.787496 -4.238941 -4.249698 -1.143192 2.0758 3.343032 2.003422 -0.3554358 25.39284
0.7571625 -1.866509 -4.304616 -4.056342 -0.8150882 2.427796 3.385863 1.702803 -0.6414143 25.38806
0.6367795 -2.120895 -4.480034 -4.286547 -0.7385522 2.494833 3.227956 1.300343 -0.9765004 25.39465
0.634273 -2.031349 -4.261961 -3.776197 -0.1978034 3.03045 3.355125 1.13245 -1.19967 25.39323
0.3758294 -2.425297 -4.700195 -4.047158 -0.4271237 2.979662 3.008509 0.7156194 -1.687192 25.39714
0.4585342 -2.306072 -4.436012 -3.726611 0.06081768 3.064109 3.085277 0.2775098 -1.915183 25.38697
0.4517911 -2.395965 -4.372783 -3.541474 0.5027794 3.399795 3.03517 0.2903139 -2.102958 25.39441
0.4398783 -2.373143 -4.561822 -3.340274 0.547528 3.363193 2.857981 0.1726903 -2.397538 25.39055
0.4181504 -2.453932 -4.47989 -3.045452 1.024397 3.631397 2.775778 -0.06424843 -2.381866 25.39395
0.2863198 -2.853386 -4.839615 -3.306905 0.9940148 3.422308 2.246728 -0.7386811 -2.35092 25.38966
0.3785934 -2.772737 -4.605979 -2.730115 1.725566 3.766875 2.1347 -1.108905 -2.332157 25.39335
0.2560127 -2.982997 -4.876036 -2.85463 1.527401 3.527291 1.513603 -1.571046 -2.550431 25.39293
0.3028214 -2.902333 -4.609447 -2.350841 2.026052 3.629562 1.224639 -2.001344 -2.599894 25.39348
0.03670568 -3.250012 -4.905389 -2.490613 1.749076 3.189524 0.6281077 -2.540412 -2.764888 25.39263
0.1880312 -2.920227 -4.432184 -1.737197 2.517821 3.395847 0.526608 -2.810848 -2.919611 25.39305
-0.02547982 -3.26703 -4.611446 -2.081303 2.24926 2.897749 0.1547499 -3.091488 -3.027295 25.39201
-0.05748431 -3.24843 -4.485904 -1.666751 2.465227 2.953573 -0.1673995 -3.191302 -2.99948 25.39616
-0.03104105 -3.365604 -4.376379 -1.389477 2.730038 2.997801 -0.3054729 -3.225395 -2.619548 25.39701
0.00719697 -3.427222 -4.332934 -0.7919453 3.323739 3.003175 -0.593477 -3.18166 -1.976702 25.39065
-0.003887224 -3.680398 -4.4444 -0.6131272 3.473216 2.644987 -1.203032 -3.203242 -1.374884 25.3955
-0.03039814 -3.621054 -4.413108 -0.1841757 3.660032 2.412384 -1.622763 -3.294869 -0.8214332 25.38783
-0.1628181 -3.657688 -4.290288 0.009043623 3.866136 2.048839 -2.110898 -3.171545 -0.5329883 25.39352
-0.3243415 -3.758083 -4.241117 0.03433391 3.421638 1.492396 -2.909716 -3.408585 -0.3150786 25.39195
-0.3011484 -3.717295 -3.784148 0.4759981 3.970324 1.757071 -2.682681 -3.424798 0.2017413 25.39283
-0.3767969 -3.890505 -3.748066 0.4971284 3.799011 1.212667 -3.041572 -3.532659 0.4357362 25.39704
-0.3022157 -3.974608 -3.61698 0.8366352 3.849805 1.014331 -3.056753 -3.424242 0.7433409 25.38908
-0.361084 -4.050951 -3.648243 1.345738 3.938776 0.6427469 -3.16041 -2.829741 1.276605 25.39367
-0.4628704 -4.313447 -3.775385 1.494313 3.794002 -0.1349966 -3.404655 -2.233722 1.590461 25.39642
-0.5799005 -4.298278 -3.620081 1.802833 3.815365 -0.3083127 -3.426482 -1.473019 2.241647 25.39367
-0.6256537 -4.167371 -3.107546 2.140574 3.844882 -0.3055815 -3.267347 -0.5308002 3.289014 25.39505
-0.7107882 -4.24209 -2.958738 2.017426 3.327564 -1.011157 -3.781669 -0.4030325 3.611249 25.39614
-0.7405278 -4.255996 -2.758067 2.162235 3.063834 -1.268907 -4.017862 -0.08024727 3.804499 25.39404
-0.8078359 -4.177444 -2.61363 2.616819 3.161617 -1.225029 -3.603528 0.1691014 4.110308 25.39353
-0.7850904 -4.293229 -2.483032 2.820638 2.969493 -1.747926 -3.491359 0.2913492 3.883128 25.39484
-0.9007473 -4.711974 -2.495411 2.848953 2.708989 -2.814175 -3.24465 0.4083104 3.524436 25.3952
-1.035087 -4.894463 -2.296194 3.085365 2.675923 -3.028613 -2.933035 1.292687 3.179963 25.39439
-0.9698501 -4.676402 -1.697755 3.458633 2.640663 -2.971316 -2.312152 2.473632 2.98438 25.39125
-1.139483 -4.579971 -1.484285 3.57704 2.221067 -3.042096 -1.923818 3.376381 2.989842 25.3952
-1.174753 -4.497514 -1.402121 3.50168 1.527479 -3.614218 -2.122429 3.593656 2.794002 25.39419
-1.252217 -4.541922 -1.468703 3.509368 0.9460672 -3.943533 -2.244361 3.536571 2.669822 25.39473
-1.256422 -4.533568 -1.270598 3.735181 0.7465506 -3.968838 -1.907464 3.590956 2.446524 25.39162
-1.393835 -4.653619 -1.024251 3.983284 0.6520822 -3.883638 -1.343155 3.86659 1.665204 25.39325
-1.423716 -4.721397 -0.5511583 4.240435 0.5524112 -3.936396 -0.3922292 3.914117 0.6175386 25.39149
-1.539687 -4.761767 -0.2978628 4.251267 0.1188294 -3.886005 0.3420264 3.869156 -0.4303471 25.39344
-1.590161 -4.565727 0.04912942 4.166167 -0.269865 -3.543439 1.201226 4.090154 -1.299046 25.39408
-1.532666 -4.534219 0.02830681 3.398342 -1.227313 -4.172559 0.9763805 3.482023 -2.109693 25.39202
-1.549351 -4.3236 0.5193504 3.688985 -0.8886362 -3.718898 1.989195 3.737072 -1.914491 25.39589
-1.677386 -4.311483 0.6245179 3.773944 -1.033784 -3.645295 2.281303 3.838037 -2.181628 25.39645
-1.684746 -4.492052 0.9583183 3.689609 -1.403777 -3.791659 2.682106 3.300139 -2.281977 25.39591
-1.906958 -4.815965 0.9779611 3.39332 -2.437455 -3.988004 2.67403 2.19956 -2.661868 25.3912
-1.957581 -4.778977 1.660684 3.801505 -2.275104 -2.893125 3.963056 2.088092 -2.752333 25.39535
-2.047285 -4.634917 1.404107 3.037657 -3.340883 -2.807996 3.492951 0.923363 -3.953292 25.39261
-1.966829 -4.313696 1.587061 2.857157 -3.601941 -2.26694 3.80877 0.6633986 -4.567626 25.39389
-2.023559 -4.097264 1.667679 2.91893 -3.657711 -1.993285 4.254144 0.4160393 -4.565757 25.39423
-2.21744 -4.151314 1.623182 2.763611 -3.812792 -2.001571 4.384324 0.01321027 -4.398407 25.39533
-2.612154 -4.512983 1.584056 2.223543 -4.355544 -1.856331 3.957136 -0.5604174 -3.890465 25.39554
-2.520367 -4.397037 2.256149 2.091863 -4.63843 -1.046045 3.774941 -1.143284 -2.75447 25.39308
-2.451873 -4.109932 2.874861 2.20728 -4.254683 0.2644677 4.00146 -1.432415 -1.490167 25.3965
-2.371119 -3.777657 2.836152 1.571334 -4.244382 0.7971967 3.746778 -2.613638 -0.9152473 25.39348
-2.542989 -3.77389 2.598389 1.069738 -4.357088 0.9597103 3.569531 -3.466747 -0.6653178 25.39616
-2.477293 -3.775288 2.630917 0.5822484 -4.476471 0.7536674 3.719385 -4.367169 -0.2537828 25.39317
-2.439779 -3.68785 3.302012 0.744005 -3.889172 1.24866 4.28452 -4.232648 0.458879 25.39464
-2.693073 -3.954254 3.476758 -0.09684802 -4.597503 1.402416 3.241604 -4.638814 0.5201415 25.39489
-2.900834 -4.006212 3.853968 -0.5424059 -4.682728 2.199766 2.314948 -4.502245 1.472192 25.39444
-2.766473 -3.570523 4.122112 -0.5452397 -3.808851 3.165822 1.791067 -4.126428 3.205217 25.39328
-2.759377 -3.313777 3.90489 -0.7153167 -3.417647 3.708959 1.201962 -4.206518 4.373943 25.39395
-2.873582 -3.307509 3.343107 -1.263293 -4.031776 3.546453 0.2252249 -4.759563 4.390765 25.39198
-2.76613 -2.840667 3.641021 -1.139558 -3.571699 4.10246 0.2675408 -4.259459 4.589026 25.39395
-3.019951 -2.992377 3.656484 -1.834124 -3.688864 3.884578 -0.3628145 -4.116153 4.041339 25.39408
-3.130494 -2.666178 4.436203 -2.069923 -2.752387 4.570864 -0.8447571 -2.426486 3.545671 25.393
-3.610643 -2.879436 4.271459 -2.598714 -2.21266 4.540155 -1.901312 -1.326681 3.136247 25.39316
-3.168075 -2.169182 3.985322 -2.548994 -1.702035 4.670306 -2.985984 -0.06545441 3.488072 25.39214 ]

View File

@ -0,0 +1,121 @@
1 [
25.39608 0.6595209 -0.9581539 -0.05509383 -1.480929 -0.8893842 -1.681797 -1.098273 -1.243401 -0.6512086 -0.4587021 -0.05043437 0.05755575
25.39381 1.401111 1.276367 0.4099369 -0.9738815 -2.181025 -3.095079 -2.977805 -2.355355 -0.8275539 0.09961782 0.7469091 -0.005369159
25.38237 1.393399 1.287358 0.2071576 -0.9043588 -2.416422 -3.032005 -3.204013 -2.120457 -0.9960594 0.3383718 0.8016195 0.06577864
25.38654 1.379923 1.225827 0.1285188 -1.026023 -2.432684 -3.135521 -3.087541 -2.192475 -0.9149975 0.32583 0.985764 0.08788309
25.40916 1.390844 1.136527 0.1843494 -1.203498 -2.334297 -3.248569 -2.978277 -2.2813 -0.8658649 0.4545175 1.006223 0.1639242
25.38297 1.384702 1.105584 0.07471646 -1.240992 -2.576068 -3.160409 -3.111823 -2.09638 -0.863974 0.5807593 1.087075 0.1917681
25.39825 1.411307 1.051812 -0.02655348 -1.367039 -2.787011 -3.239229 -2.988708 -2.133251 -0.5477066 0.6291887 1.11602 0.2467811
25.3968 1.416273 1.03156 -0.1463996 -1.598212 -2.864465 -3.389926 -3.045623 -1.917265 -0.4401143 0.7997456 1.182584 0.2599318
25.3984 1.378285 1.003728 -0.2987479 -1.722621 -3.104068 -3.496778 -3.087887 -1.769975 -0.31738 0.9760999 1.339967 0.2145324
25.38439 1.363578 0.9850117 -0.3692382 -1.860339 -3.218788 -3.60353 -3.097793 -1.684179 -0.03071402 1.109145 1.304184 0.2844062
25.40594 1.392323 0.9575986 -0.3521634 -1.878422 -3.415221 -3.580741 -3.11795 -1.557577 0.09638613 1.120297 1.428832 0.2932371
25.39048 1.416178 0.9238334 -0.4007807 -2.032033 -3.430605 -3.702929 -3.050423 -1.430084 0.197562 1.371424 1.308928 0.3723306
25.3787 1.464186 0.8290362 -0.3764768 -2.24931 -3.408303 -3.757509 -2.887193 -1.377245 0.3596067 1.368912 1.426993 0.3605768
25.38585 1.383492 0.8056247 -0.6175001 -2.315953 -3.574461 -3.764355 -2.873989 -1.280048 0.5179416 1.550079 1.541427 0.3173845
25.39545 1.393926 0.6545959 -0.7327172 -2.439029 -3.677015 -3.773282 -2.848512 -1.167683 0.6329998 1.589357 1.85591 0.2296739
25.40651 1.350533 0.6581527 -0.7867699 -2.52762 -3.657286 -3.768898 -2.71004 -1.074361 0.7556911 1.781124 1.724643 0.3226899
25.39396 1.326218 0.6573808 -0.9267013 -2.642444 -3.590564 -3.851363 -2.497806 -0.8695696 0.8282326 1.819417 1.783856 0.3415034
25.40022 1.292459 0.507834 -1.080191 -2.78223 -3.847787 -3.725638 -2.401706 -0.6651718 1.009955 2.000422 1.814542 0.3358188
25.3949 1.251962 0.3925965 -1.173135 -2.983908 -3.953668 -3.654155 -2.23751 -0.4169448 1.234398 2.072157 1.788999 0.3671112
25.40088 1.136 0.3438413 -1.38044 -3.00539 -4.233454 -3.473226 -2.240769 -0.2289382 1.532514 2.122548 1.86871 0.3318342
25.40474 1.153765 0.3096808 -1.436207 -2.995998 -4.144277 -3.501545 -2.132554 -0.04458857 1.514287 2.225335 1.73603 0.3804624
25.39125 1.202605 0.2649527 -1.452848 -3.088228 -4.093655 -3.49153 -1.916322 0.0267159 1.709128 2.032177 1.654215 0.3629981
25.4066 1.109095 0.1238721 -1.712915 -3.435042 -4.154166 -3.479783 -1.705077 0.2707369 1.80672 2.149712 1.706285 0.1333067
25.3951 1.204601 0.005959769 -1.831075 -3.585684 -4.248459 -3.228891 -1.498703 0.5600159 1.843948 2.098616 1.511674 0.07791518
25.39434 1.189905 -0.007083135 -1.998363 -3.674732 -4.28191 -3.127707 -1.168897 0.7890184 1.935824 2.15375 1.148433 0.06201413
25.39326 1.182489 -0.01304965 -2.122438 -3.783591 -4.301538 -3.030363 -0.8507715 0.8840672 2.159688 1.898517 1.075552 -0.05250743
25.39042 1.244946 -0.1296432 -2.083518 -4.076716 -4.182724 -2.896407 -0.6420866 1.136407 2.091595 1.88641 0.8917655 -0.1430146
25.39459 1.156196 -0.1281042 -2.299061 -4.106387 -4.331763 -2.718668 -0.4247692 1.320015 2.217241 1.829382 0.7933849 -0.2738643
25.38486 1.13536 -0.2804177 -2.316438 -4.181387 -4.305693 -2.677593 -0.1862608 1.580003 2.33463 1.662863 0.7268399 -0.4022956
25.39576 1.118627 -0.313969 -2.361879 -4.193508 -4.176651 -2.552843 0.07219703 1.873536 2.306244 1.596717 0.4124145 -0.4448512
25.39695 1.084738 -0.4182397 -2.485966 -4.24519 -4.186189 -2.269332 0.3363949 2.03552 2.407838 1.357195 0.338299 -0.5935728
25.38689 0.9748012 -0.6208349 -2.760934 -4.32318 -4.096678 -2.101825 0.5001053 2.210387 2.511196 1.482927 0.06601807 -0.7177349
25.39787 1.014486 -0.5486525 -2.723525 -4.19924 -3.857696 -1.82758 0.7387723 2.378424 2.551791 1.196076 -0.3493665 -0.6571725
25.40326 0.9668196 -0.8153204 -2.917299 -4.558866 -3.808855 -1.706933 0.7234591 2.420333 2.496037 1.327209 -0.2594952 -0.8544234
25.39464 0.9743929 -0.8506861 -3.10229 -4.667871 -3.754556 -1.428725 1.091263 2.391257 2.345948 1.048951 -0.2684586 -0.9234016
25.3835 0.9588755 -0.8711729 -3.258343 -4.636948 -3.730234 -1.126683 1.549204 2.549823 2.068453 0.7800729 -0.4888894 -0.9244992
25.39136 1.015647 -0.8124251 -3.275899 -4.537634 -3.586413 -0.7039872 1.895392 2.667868 1.855399 0.3212259 -0.7001661 -0.8749778
25.39826 0.9390439 -1.068062 -3.513859 -4.758694 -3.582233 -0.5510803 2.033499 2.73268 1.512825 0.4327154 -0.7254434 -0.9670301
25.39433 0.9811047 -1.058309 -3.393357 -4.765159 -3.303966 -0.3637304 2.411272 2.676879 1.516132 -0.06876485 -0.9087597 -0.9274111
25.39488 0.9404514 -1.115232 -3.511874 -4.616561 -3.066789 0.007479461 2.482418 2.792868 1.502655 -0.5019601 -1.11686 -0.8942358
25.39559 0.8337971 -1.318671 -3.715201 -4.725999 -2.823731 0.2907045 2.445621 2.867822 1.354906 -0.6649113 -1.265579 -0.915262
25.38846 0.8066285 -1.291624 -3.680167 -4.519381 -2.431271 0.5655504 2.714079 2.948334 1.129172 -0.9987298 -1.563547 -0.8478273
25.39359 0.8454702 -1.387438 -3.541444 -4.301412 -2.134841 1.087112 3.049462 2.976198 0.8826582 -1.065726 -2.021794 -0.6722531
25.39786 0.6554265 -1.602208 -3.958938 -4.484985 -2.370814 1.129071 2.967727 2.728607 0.6788831 -1.015548 -1.781345 -0.7529901
25.40098 0.7446101 -1.469392 -3.773205 -4.147902 -2.00407 1.410727 3.28349 2.673715 0.6834164 -1.330184 -1.993082 -0.5462615
25.39558 0.6955266 -1.633757 -3.91284 -4.289802 -1.738611 1.577837 3.372359 2.414202 0.396918 -1.403592 -1.892191 -0.4956261
25.39386 0.634804 -1.856538 -4.31057 -4.535988 -1.571848 1.65975 3.316414 2.082177 -0.09738034 -1.382434 -1.402779 -0.4935276
25.39646 0.7693399 -1.794024 -4.206841 -4.133829 -0.9590068 2.221774 3.406887 2.006833 -0.4169021 -1.769292 -1.302819 -0.1948395
25.39031 0.7170282 -1.963971 -4.379105 -4.154769 -0.8258301 2.417751 3.357678 1.539518 -0.6831351 -1.94868 -0.8957705 -0.08620808
25.39644 0.6570426 -2.043686 -4.388024 -4.128809 -0.5803539 2.642519 3.238543 1.289491 -1.071701 -1.81218 -0.8122736 0.1259993
25.39023 0.6264482 -2.056614 -4.267493 -3.730954 -0.1605028 3.094971 3.316931 1.037805 -1.314276 -1.949592 -0.7443309 0.3889642
25.39391 0.4324989 -2.319285 -4.570457 -3.890029 -0.2741919 3.037142 3.021763 0.6014917 -1.768123 -1.861037 -0.2545858 0.4110309
25.39349 0.5259162 -2.22434 -4.230934 -3.485613 0.3945462 3.304989 3.203973 0.3679494 -1.923899 -2.036441 -0.1910191 0.6413785
25.38898 0.5105083 -2.331731 -4.356012 -3.461822 0.5154694 3.369842 2.98262 0.2247323 -2.207041 -1.955508 0.04480025 0.6809
25.39424 0.4690387 -2.324314 -4.479813 -3.170043 0.6985514 3.478192 2.873146 0.1997507 -2.433693 -1.905935 0.2024981 0.7611026
25.3911 0.3938288 -2.531708 -4.546507 -3.078403 1.061407 3.598579 2.674582 -0.2301056 -2.364634 -1.843756 0.4362425 0.8247086
25.39556 0.3886783 -2.711477 -4.627234 -2.97469 1.343206 3.667859 2.261359 -0.7860026 -2.319474 -1.541456 0.7089025 0.829094
25.38706 0.3992852 -2.761646 -4.569061 -2.664965 1.821735 3.77127 2.064602 -1.207806 -2.379433 -1.186758 0.8638195 0.8910642
25.39788 0.3106323 -2.888816 -4.730036 -2.59701 1.788494 3.671894 1.509059 -1.620997 -2.538884 -0.7833451 1.215784 0.8551151
25.3968 0.2208461 -3.019401 -4.707914 -2.404878 1.926125 3.519924 1.056983 -2.150348 -2.660008 -0.4545089 1.674433 0.8501246
25.38883 0.09665678 -3.134991 -4.756564 -2.29358 1.917393 3.163052 0.5329977 -2.711747 -2.852752 0.1123346 2.1717 0.8424319
25.38903 0.1795857 -2.931179 -4.37874 -1.657462 2.630224 3.461804 0.5173655 -2.779907 -2.924237 -0.01468956 2.062378 0.7617861
25.3899 0.0154831 -3.216196 -4.477682 -1.920576 2.408597 2.928103 0.1156944 -3.141251 -3.060516 0.2467021 2.665351 0.602103
25.39197 0.009429784 -3.177636 -4.299308 -1.368247 2.805496 3.232197 -0.003600859 -3.090051 -2.860243 0.2804382 2.41232 0.480293
25.38938 -0.05418803 -3.405716 -4.42114 -1.323507 2.735739 2.933275 -0.4497724 -3.26091 -2.50248 0.5448469 2.488349 0.3666969
25.3979 -0.1002714 -3.658289 -4.567589 -0.9876583 3.120006 2.666653 -0.8723732 -3.256884 -1.83888 1.034785 2.027886 0.217672
25.39607 0.0288604 -3.629244 -4.362079 -0.4259778 3.652335 2.682682 -1.223551 -3.203444 -1.200807 1.38206 1.421586 0.02934884
25.39347 -0.2385861 -3.874293 -4.695451 -0.439531 3.351912 2.042277 -1.898209 -3.319998 -0.7196015 1.871045 1.484312 -0.2415925
25.39333 -0.178312 -3.62081 -4.17474 0.1785252 4.025741 2.172521 -2.121588 -3.071793 -0.4090044 2.334521 0.7480026 -0.3941457
25.39682 -0.3141245 -3.737935 -4.138998 0.1637467 3.531245 1.556591 -2.889966 -3.403666 -0.1890331 2.863672 0.6633884 -0.5199888
25.39563 -0.3573751 -3.804209 -3.837905 0.3453543 3.756353 1.44176 -2.923827 -3.602793 0.1920436 2.745297 0.5471389 -0.6847376
25.39661 -0.3479443 -3.888128 -3.638222 0.7058708 4.037488 1.404892 -2.851534 -3.361458 0.5795274 2.675339 0.2492743 -0.8172188
25.39479 -0.5934795 -4.352557 -4.01034 0.5315409 3.384104 0.5076691 -3.342246 -3.399735 0.8842365 2.785138 0.6507739 -0.9503966
25.39425 -0.4196319 -4.151624 -3.693012 1.418871 3.986712 0.5407985 -3.140143 -2.653084 1.399719 2.261919 -0.168319 -1.001698
25.39023 -0.4821656 -4.338616 -3.76013 1.543306 3.749807 -0.3174723 -3.473772 -2.078088 1.671417 2.27176 -0.5218784 -0.988964
25.3956 -0.8552602 -4.633649 -3.933339 1.445703 3.332988 -0.7826147 -3.736487 -1.392235 2.459047 2.151047 -0.920535 -1.050402
25.39037 -0.5972642 -4.112915 -3.009016 2.156868 3.672545 -0.5362386 -3.443495 -0.499207 3.39324 1.999583 -1.961218 -0.8979888
25.39725 -0.6634712 -4.150053 -2.750626 2.208332 3.495676 -0.8485923 -3.600156 -0.1248213 3.803953 1.952311 -2.305469 -0.793108
25.39277 -0.9479498 -4.521524 -3.028249 1.946641 2.790704 -1.535761 -4.175349 -0.1800363 3.806693 1.776449 -2.21496 -0.790504
25.39759 -0.9929648 -4.454915 -2.909572 2.338028 2.692453 -1.747454 -3.973534 -0.0439007 3.923756 1.411864 -2.277675 -0.6739596
25.39358 -0.8275152 -4.401517 -2.459442 2.915899 3.017301 -1.847339 -3.242939 0.3813381 3.869132 1.070749 -2.389175 -0.5018433
25.38981 -0.9245701 -4.779424 -2.46105 2.879549 2.656358 -2.991104 -3.247016 0.5399511 3.397641 0.1291516 -1.71416 -0.3277176
25.39294 -1.197794 -5.133918 -2.494724 2.839587 2.317796 -3.464848 -3.195415 1.395362 3.044854 -0.7985698 -1.172464 -0.06178374
25.38969 -0.957609 -4.659888 -1.704898 3.307278 2.276371 -3.377539 -2.516868 2.483862 2.848398 -1.737945 -1.2311 0.3061946
25.39302 -1.127122 -4.533212 -1.455567 3.549684 1.977633 -3.272105 -2.075983 3.393464 2.862955 -2.145893 -1.208638 0.5534311
25.39511 -1.21762 -4.461216 -1.241092 3.736875 1.75684 -3.211917 -1.635185 3.946145 3.035527 -2.344062 -1.197175 0.7844507
25.3896 -1.344751 -4.637038 -1.566798 3.465493 0.7636025 -4.054176 -2.322286 3.488001 2.61532 -2.641398 -0.6737604 0.8044318
25.39262 -1.422682 -4.720421 -1.315231 3.703445 0.7171685 -3.938689 -1.727554 3.685732 2.343668 -2.70137 -0.7220458 0.9508815
25.39382 -1.715271 -5.128881 -1.456941 3.583168 0.158038 -4.373479 -1.613447 3.679257 1.275511 -2.451297 -0.1742396 1.089375
25.39132 -1.433359 -4.805173 -0.6603109 3.958865 0.1490136 -4.403913 -0.6100293 3.575091 0.2017417 -2.433694 0.1227985 1.161998
25.39603 -1.589545 -4.839986 -0.4797902 3.911168 -0.3960432 -4.273879 0.0468164 3.551844 -0.8884212 -2.647773 0.8494496 1.207644
25.39602 -1.611716 -4.650408 -0.1505726 3.721827 -0.8102283 -4.07605 0.7381689 3.645723 -1.741966 -3.101535 1.501467 1.141174
25.39135 -1.529538 -4.429765 0.3749141 3.792032 -0.787674 -3.613407 1.656679 3.851177 -1.849397 -3.64194 1.862776 0.9517084
25.39513 -1.614772 -4.438097 0.255153 3.298056 -1.350288 -4.228018 1.567837 3.325321 -2.223 -3.773636 2.274166 0.8473473
25.39181 -1.634263 -4.302464 0.6846615 3.732112 -1.1697 -3.737545 2.301027 3.732112 -2.261038 -3.0256 1.955217 0.7454891
25.39159 -1.740555 -4.545712 1.08881 3.782276 -1.417985 -3.62708 2.95438 3.248435 -2.216609 -2.030725 1.773224 0.652676
25.39181 -1.836086 -4.669994 1.387012 3.815741 -2.169345 -3.419776 3.342448 2.462875 -2.511709 -0.640659 1.477757 0.2674761
25.39583 -1.980653 -4.846069 1.35684 3.204206 -2.972459 -3.382097 3.311293 1.322475 -3.270529 0.3303748 1.615174 -0.187346
25.39167 -1.968198 -4.421285 1.691718 3.348696 -3.053273 -2.275577 4.021816 1.203073 -3.903115 1.266266 1.357396 -0.5597394
25.39645 -2.256313 -4.569074 1.303622 2.574072 -3.835924 -2.374366 3.614606 0.3834634 -4.70789 1.667082 1.683081 -0.6147239
25.39249 -2.199976 -4.252557 1.507535 2.785041 -3.755555 -2.088585 4.205575 0.2271294 -4.59061 1.877299 1.286854 -0.7685128
25.39421 -2.261936 -4.199285 1.503708 2.483779 -4.113506 -2.187372 4.116325 -0.2629535 -4.409859 2.087287 1.272506 -0.8198587
25.39548 -2.287668 -4.12232 2.18515 2.677553 -4.022824 -1.321473 4.370759 -0.2877623 -3.455403 2.336393 0.7669829 -1.162279
25.39491 -2.487674 -4.22797 2.768238 2.647547 -4.050305 -0.1890717 4.437805 -0.6508834 -2.094266 2.846475 0.07996888 -1.236835
25.39287 -2.578163 -4.129606 2.927238 2.286407 -3.943671 0.7228366 4.344687 -1.371715 -1.101616 3.741023 -0.7569331 -1.26316
25.39086 -2.427197 -3.754699 2.817255 1.529514 -4.155435 0.9788993 3.803943 -2.707731 -0.767959 4.441221 -1.126708 -1.285825
25.39401 -2.547151 -3.809549 2.492844 0.8109949 -4.547522 0.7959823 3.465753 -3.835242 -0.6310719 4.421063 -1.145508 -1.190577
25.39615 -2.386952 -3.568172 3.072759 1.068389 -3.855132 1.273782 4.304488 -3.909682 0.2549425 4.074016 -1.263062 -1.11404
25.39224 -2.398592 -3.626514 3.489653 0.7210477 -3.81793 1.423074 4.316233 -4.166702 0.6074756 3.246344 -1.351691 -0.9062979
25.39235 -2.778072 -3.929898 3.681883 0.01197612 -4.45978 1.809032 3.210696 -4.355368 0.7947038 1.876583 -1.341985 -0.5352947
25.39638 -2.752123 -3.701761 4.293683 -0.1363454 -3.997528 2.85506 2.632117 -3.920329 2.169754 0.7456899 -1.637011 0.1647733
25.39244 -2.815723 -3.493327 4.180171 -0.4144949 -3.380938 3.594135 1.885116 -3.876452 3.750889 0.1804897 -1.941075 0.761199
25.3928 -2.915109 -3.584018 3.374968 -1.305688 -4.149791 3.194752 0.4554631 -4.870588 4.126951 -0.3852961 -1.65064 0.9826126
25.39375 -2.671411 -2.877055 3.763553 -0.7819858 -3.393721 4.176661 0.6797315 -4.162246 4.811341 -0.1373484 -1.785284 0.8865886
25.39448 -3.087644 -3.186778 3.347238 -1.566059 -3.960502 3.758589 -0.1409264 -4.559641 4.290199 -0.4153855 -1.563303 1.104712
25.39384 -3.154791 -2.817494 4.183438 -1.332222 -2.787263 4.635742 0.1727483 -3.131655 4.365574 -0.4409717 -1.666639 1.379987
25.39325 -3.34754 -2.945146 4.10977 -2.688161 -3.223692 4.110541 -1.598916 -2.658613 3.011819 -1.385751 -0.9060279 1.575056
25.39371 -3.40345 -2.609131 4.31201 -2.606999 -2.048678 4.487849 -2.186281 -1.033186 3.214072 -2.750734 -0.3203016 1.584483
25.39429 -3.447818 -2.364646 3.679238 -2.581827 -1.782292 4.630171 -3.277408 -0.01428371 3.341475 -3.719985 0.08800912 1.608494
25.39458 -2.659129 -2.246307 0.6763005 -2.178889 -0.8830279 0.6955065 -1.452893 -0.3107683 1.023859 -1.04017 0.405329 0.9133215 ]

View File

@ -0,0 +1,119 @@
1 [
25.38532 1.413609 1.290824 0.4644783 -0.9542531 -2.107659 -3.068145 -2.939797 -2.310168 -0.8795097 0.07123499 0.6953657 0.006708961
25.40668 1.390788 1.282347 0.2231341 -0.9314669 -2.371381 -3.059567 -3.144366 -2.177221 -0.9286566 0.2787067 0.7727678 0.05165089
25.39494 1.388577 1.236941 0.1314043 -0.9991927 -2.458853 -3.084329 -3.129788 -2.174042 -0.9450245 0.3303247 0.9597021 0.07441664
25.38965 1.375442 1.173042 0.1502425 -1.142179 -2.355545 -3.236665 -3.001543 -2.267133 -0.87585 0.429065 0.9958894 0.1438726
25.40345 1.393851 1.104497 0.141741 -1.257701 -2.464931 -3.169199 -3.066706 -2.124195 -0.9064269 0.5478933 1.064987 0.1910156
25.39414 1.402755 1.074741 -0.02913864 -1.290972 -2.777904 -3.207083 -3.022014 -2.133607 -0.6296256 0.6236882 1.107699 0.2333915
25.41012 1.407352 1.008147 -0.1131425 -1.579594 -2.861438 -3.378169 -3.038812 -2.003954 -0.4230602 0.7642407 1.178372 0.2434251
25.40563 1.374495 0.9902636 -0.2996644 -1.702737 -3.079249 -3.487383 -3.102599 -1.778829 -0.3840032 0.9728982 1.342047 0.1914193
25.39347 1.355937 0.9989512 -0.3545118 -1.856242 -3.178149 -3.578775 -3.116787 -1.698683 -0.09036512 1.096203 1.291682 0.2742309
25.40889 1.391236 0.9581302 -0.3472444 -1.861408 -3.362813 -3.617713 -3.082451 -1.610092 0.08077221 1.110428 1.392577 0.2974799
25.38499 1.399433 0.9346108 -0.3959779 -1.987612 -3.453102 -3.663212 -3.103057 -1.460295 0.1775268 1.305384 1.366915 0.3381164
25.38442 1.462968 0.8531632 -0.3624526 -2.20417 -3.405833 -3.721663 -2.937944 -1.370359 0.3188259 1.353597 1.382275 0.3752744
25.38152 1.403882 0.8126143 -0.5479507 -2.295699 -3.53874 -3.76027 -2.86957 -1.315046 0.4731908 1.513728 1.501024 0.3294289
25.39383 1.382054 0.7026194 -0.7345445 -2.412446 -3.638927 -3.798969 -2.857107 -1.190031 0.6110605 1.57739 1.795883 0.2402891
25.40385 1.372825 0.6358751 -0.7637866 -2.48975 -3.70132 -3.739165 -2.769514 -1.092504 0.732299 1.730426 1.760171 0.30169
25.39873 1.323208 0.6830922 -0.8962593 -2.613356 -3.574561 -3.849434 -2.527087 -0.9416634 0.7969939 1.820451 1.757689 0.3403743
25.40309 1.289285 0.5301567 -1.04017 -2.772953 -3.790753 -3.773839 -2.450881 -0.7284988 0.9865087 1.960564 1.844692 0.3179868
25.38262 1.30077 0.4414823 -1.118887 -2.906891 -3.884625 -3.672447 -2.228457 -0.4646592 1.120844 2.053152 1.715453 0.4030866
25.39624 1.137272 0.3376181 -1.360427 -3.016979 -4.219143 -3.481745 -2.279193 -0.2806494 1.509762 2.086309 1.938008 0.2927777
25.39575 1.137126 0.3023909 -1.455994 -3.012171 -4.178331 -3.530272 -2.166651 -0.05700313 1.494152 2.292292 1.753569 0.3678013
25.3924 1.186537 0.2802466 -1.418199 -3.050323 -4.108273 -3.501019 -1.981316 -0.01219817 1.675895 2.070936 1.67329 0.3792064
25.38689 1.188756 0.2457795 -1.59098 -3.264617 -4.071124 -3.437665 -1.710851 0.1895033 1.732037 1.997065 1.604348 0.2684189
25.39572 1.152641 -0.02832622 -1.823405 -3.629564 -4.216199 -3.330145 -1.549857 0.4657693 1.887605 2.158598 1.633109 0.03561019
25.38815 1.19065 -0.02861873 -1.970937 -3.662004 -4.297549 -3.13729 -1.28938 0.788473 1.865383 2.201824 1.238912 0.05587117
25.39475 1.206228 0.04685518 -2.088806 -3.672893 -4.302299 -3.006203 -0.8956488 0.8469776 2.13319 1.874846 1.030332 0.02053536
25.39246 1.268487 -0.08625151 -2.027763 -4.007592 -4.160067 -2.932582 -0.6698269 1.087226 2.081321 1.842439 0.8919858 -0.08929317
25.39709 1.117322 -0.1926424 -2.340369 -4.156783 -4.329001 -2.778152 -0.5231138 1.256799 2.214584 1.95604 0.9139624 -0.3135178
25.40476 1.168648 -0.1992409 -2.266985 -4.138491 -4.265579 -2.6857 -0.2047093 1.525033 2.305256 1.653311 0.6612459 -0.3235066
25.39227 1.119295 -0.3028975 -2.347377 -4.182744 -4.260194 -2.579925 -0.007774514 1.832262 2.301147 1.608258 0.5232127 -0.4441239
25.40465 1.027738 -0.5003986 -2.539946 -4.333557 -4.205943 -2.402873 0.2258443 1.948647 2.444017 1.546272 0.4649984 -0.6321162
25.39269 0.9784032 -0.6186817 -2.745365 -4.338122 -4.166886 -2.15028 0.4480724 2.157808 2.513159 1.470017 0.231031 -0.7413772
25.40318 1.008477 -0.5290523 -2.715651 -4.186758 -3.905894 -1.861171 0.7038742 2.375283 2.540268 1.236312 -0.3244483 -0.6361544
25.39934 0.9682827 -0.7742785 -2.873694 -4.494859 -3.81933 -1.794264 0.7336959 2.365326 2.555315 1.30926 -0.2432571 -0.8264175
25.39792 0.978914 -0.8186723 -3.042555 -4.633791 -3.754411 -1.470605 0.9454756 2.430828 2.3588 1.122578 -0.2830999 -0.8937572
25.38512 0.975665 -0.8669639 -3.217564 -4.63007 -3.733683 -1.222846 1.471363 2.496127 2.132827 0.8846477 -0.4851157 -0.9063202
25.39313 0.9867552 -0.8420247 -3.293777 -4.594607 -3.644038 -0.809936 1.800793 2.631897 1.939019 0.3899176 -0.5838321 -0.9148196
25.39145 1.008512 -0.9055113 -3.373589 -4.604676 -3.561381 -0.5277195 2.029937 2.721858 1.547688 0.3659887 -0.8152301 -0.8852096
25.40082 0.9850418 -1.059142 -3.405119 -4.767247 -3.34202 -0.4476502 2.31317 2.702466 1.502576 0.04555156 -0.8558514 -0.9306388
25.39538 1.000611 -1.034842 -3.378496 -4.555243 -3.067476 -0.0262686 2.55475 2.789144 1.53972 -0.4570932 -1.183553 -0.8500152
25.39835 0.8871047 -1.210229 -3.605299 -4.65952 -2.833532 0.26362 2.434236 2.892101 1.363947 -0.6559085 -1.266635 -0.8815719
25.3992 0.8388072 -1.253652 -3.682436 -4.523992 -2.485977 0.4900617 2.650782 2.940339 1.217679 -0.9644551 -1.508766 -0.8493617
25.39301 0.8448223 -1.344651 -3.540604 -4.343541 -2.197538 0.9870064 2.970114 2.967928 0.9476861 -1.078636 -1.916157 -0.7201187
25.38788 0.7774073 -1.435785 -3.716074 -4.306447 -2.197223 1.195093 3.042425 2.825031 0.6986012 -1.079678 -1.915956 -0.6693316
25.39198 0.7105902 -1.508647 -3.859203 -4.278859 -2.145113 1.274077 3.173957 2.63133 0.6662624 -1.228381 -1.88434 -0.6269987
25.39025 0.7593616 -1.521935 -3.733778 -4.092356 -1.686811 1.653421 3.432562 2.614487 0.5256236 -1.42891 -2.056682 -0.4286263
25.38977 0.7109561 -1.708717 -4.101766 -4.407348 -1.528757 1.653018 3.380414 2.138874 0.01044318 -1.415599 -1.551451 -0.4677523
25.39284 0.7482379 -1.787496 -4.238941 -4.249698 -1.143192 2.0758 3.343032 2.003422 -0.3554358 -1.662306 -1.309078 -0.2771662
25.38806 0.7571625 -1.866509 -4.304616 -4.056342 -0.8150882 2.427796 3.385863 1.702803 -0.6414143 -1.939726 -0.9966753 -0.1034371
25.39465 0.6367795 -2.120895 -4.480034 -4.286547 -0.7385522 2.494833 3.227956 1.300343 -0.9765004 -1.802161 -0.7459382 0.02910506
25.39323 0.634273 -2.031349 -4.261961 -3.776197 -0.1978034 3.03045 3.355125 1.13245 -1.19967 -1.945508 -0.827869 0.3568698
25.39714 0.3758294 -2.425297 -4.700195 -4.047158 -0.4271237 2.979662 3.008509 0.7156194 -1.687192 -1.800906 -0.2786864 0.3350899
25.38697 0.4585342 -2.306072 -4.436012 -3.726611 0.06081768 3.064109 3.085277 0.2775098 -1.915183 -1.958909 -0.07750943 0.5295318
25.39441 0.4517911 -2.395965 -4.372783 -3.541474 0.5027794 3.399795 3.03517 0.2903139 -2.102958 -1.979737 -0.08618137 0.697251
25.39055 0.4398783 -2.373143 -4.561822 -3.340274 0.547528 3.363193 2.857981 0.1726903 -2.397538 -1.89124 0.1970018 0.7143155
25.39395 0.4181504 -2.453932 -4.47989 -3.045452 1.024397 3.631397 2.775778 -0.06424843 -2.381866 -1.886943 0.3437725 0.8362165
25.38966 0.2863198 -2.853386 -4.839615 -3.306905 0.9940148 3.422308 2.246728 -0.7386811 -2.35092 -1.518461 0.8290582 0.7407444
25.39335 0.3785934 -2.772737 -4.605979 -2.730115 1.725566 3.766875 2.1347 -1.108905 -2.332157 -1.308433 0.8221107 0.8825022
25.39293 0.2560127 -2.982997 -4.876036 -2.85463 1.527401 3.527291 1.513603 -1.571046 -2.550431 -0.7851809 1.298196 0.8088276
25.39348 0.3028214 -2.902333 -4.609447 -2.350841 2.026052 3.629562 1.224639 -2.001344 -2.599894 -0.5872015 1.47986 0.8651814
25.39263 0.03670568 -3.250012 -4.905389 -2.490613 1.749076 3.189524 0.6281077 -2.540412 -2.764888 0.07456205 2.214025 0.8147147
25.39305 0.1880312 -2.920227 -4.432184 -1.737197 2.517821 3.395847 0.526608 -2.810848 -2.919611 0.02410484 2.01337 0.7969061
25.39201 -0.02547982 -3.26703 -4.611446 -2.081303 2.24926 2.897749 0.1547499 -3.091488 -3.027295 0.2715201 2.75045 0.627373
25.39616 -0.05748431 -3.24843 -4.485904 -1.666751 2.465227 2.953573 -0.1673995 -3.191302 -2.99948 0.3945552 2.704575 0.4837284
25.39701 -0.03104105 -3.365604 -4.376379 -1.389477 2.730038 2.997801 -0.3054729 -3.225395 -2.619548 0.4325757 2.51992 0.3990406
25.39065 0.00719697 -3.427222 -4.332934 -0.7919453 3.323739 3.003175 -0.593477 -3.18166 -1.976702 0.827894 1.91626 0.2893917
25.3955 -0.003887224 -3.680398 -4.4444 -0.6131272 3.473216 2.644987 -1.203032 -3.203242 -1.374884 1.297321 1.583967 0.07641211
25.38783 -0.03039814 -3.621054 -4.413108 -0.1841757 3.660032 2.412384 -1.622763 -3.294869 -0.8214332 1.654651 1.267738 -0.1604561
25.39352 -0.1628181 -3.657688 -4.290288 0.009043623 3.866136 2.048839 -2.110898 -3.171545 -0.5329883 2.173151 0.9563589 -0.3739195
25.39195 -0.3243415 -3.758083 -4.241117 0.03433391 3.421638 1.492396 -2.909716 -3.408585 -0.3150786 2.870423 0.8213838 -0.4789027
25.39283 -0.3011484 -3.717295 -3.784148 0.4759981 3.970324 1.757071 -2.682681 -3.424798 0.2017413 2.71036 0.4048137 -0.6445876
25.39704 -0.3767969 -3.890505 -3.748066 0.4971284 3.799011 1.212667 -3.041572 -3.532659 0.4357362 2.735048 0.4242261 -0.7848022
25.38908 -0.3022157 -3.974608 -3.61698 0.8366352 3.849805 1.014331 -3.056753 -3.424242 0.7433409 2.596978 0.2446404 -0.9070554
25.39367 -0.361084 -4.050951 -3.648243 1.345738 3.938776 0.6427469 -3.16041 -2.829741 1.276605 2.315392 -0.07778139 -0.9921218
25.39642 -0.4628704 -4.313447 -3.775385 1.494313 3.794002 -0.1349966 -3.404655 -2.233722 1.590461 2.275629 -0.4133664 -0.995474
25.39367 -0.5799005 -4.298278 -3.620081 1.802833 3.815365 -0.3083127 -3.426482 -1.473019 2.241647 2.08187 -1.076278 -0.9939865
25.39505 -0.6256537 -4.167371 -3.107546 2.140574 3.844882 -0.3055815 -3.267347 -0.5308002 3.289014 1.999882 -1.924684 -0.8966939
25.39614 -0.7107882 -4.24209 -2.958738 2.017426 3.327564 -1.011157 -3.781669 -0.4030325 3.611249 1.935135 -2.139185 -0.8689573
25.39404 -0.7405278 -4.255996 -2.758067 2.162235 3.063834 -1.268907 -4.017862 -0.08024727 3.804499 1.835143 -2.310755 -0.7657563
25.39353 -0.8078359 -4.177444 -2.61363 2.616819 3.161617 -1.225029 -3.603528 0.1691014 4.110308 1.488137 -2.454586 -0.6299143
25.39484 -0.7850904 -4.293229 -2.483032 2.820638 2.969493 -1.747926 -3.491359 0.2913492 3.883128 1.250763 -2.380477 -0.5352952
25.3952 -0.9007473 -4.711974 -2.495411 2.848953 2.708989 -2.814175 -3.24465 0.4083104 3.524436 0.3579024 -1.862941 -0.3946679
25.39439 -1.035087 -4.894463 -2.296194 3.085365 2.675923 -3.028613 -2.933035 1.292687 3.179963 -0.6647872 -1.539418 -0.1334762
25.39125 -0.9698501 -4.676402 -1.697755 3.458633 2.640663 -2.971316 -2.312152 2.473632 2.98438 -1.442109 -1.435267 0.2667011
25.3952 -1.139483 -4.579971 -1.484285 3.57704 2.221067 -3.042096 -1.923818 3.376381 2.989842 -2.069424 -1.306365 0.5391726
25.39419 -1.174753 -4.497514 -1.402121 3.50168 1.527479 -3.614218 -2.122429 3.593656 2.794002 -2.392645 -1.051739 0.6572495
25.39473 -1.252217 -4.541922 -1.468703 3.509368 0.9460672 -3.943533 -2.244361 3.536571 2.669822 -2.611197 -0.7723851 0.7709473
25.39162 -1.256422 -4.533568 -1.270598 3.735181 0.7465506 -3.968838 -1.907464 3.590956 2.446524 -2.67715 -0.6728612 0.9147022
25.39325 -1.393835 -4.653619 -1.024251 3.983284 0.6520822 -3.883638 -1.343155 3.86659 1.665204 -2.559659 -0.5779212 1.074652
25.39149 -1.423716 -4.721397 -0.5511583 4.240435 0.5524112 -3.936396 -0.3922292 3.914117 0.6175386 -2.368694 -0.1952814 1.181569
25.39344 -1.539687 -4.761767 -0.2978628 4.251267 0.1188294 -3.886005 0.3420264 3.869156 -0.4303471 -2.505547 0.4407901 1.20869
25.39408 -1.590161 -4.565727 0.04912942 4.166167 -0.269865 -3.543439 1.201226 4.090154 -1.299046 -2.90868 1.164906 1.189988
25.39202 -1.532666 -4.534219 0.02830681 3.398342 -1.227313 -4.172559 0.9763805 3.482023 -2.109693 -3.632659 2.006691 1.048563
25.39589 -1.549351 -4.3236 0.5193504 3.688985 -0.8886362 -3.718898 1.989195 3.737072 -1.914491 -3.801989 2.065796 0.8161734
25.39645 -1.677386 -4.311483 0.6245179 3.773944 -1.033784 -3.645295 2.281303 3.838037 -2.181628 -3.198649 2.011589 0.7663411
25.39591 -1.684746 -4.492052 0.9583183 3.689609 -1.403777 -3.791659 2.682106 3.300139 -2.281977 -2.350026 1.840471 0.6822743
25.3912 -1.906958 -4.815965 0.9779611 3.39332 -2.437455 -3.988004 2.67403 2.19956 -2.661868 -1.082561 1.781868 0.3928866
25.39535 -1.957581 -4.778977 1.660684 3.801505 -2.275104 -2.893125 3.963056 2.088092 -2.752333 0.1982017 1.368886 -0.04508287
25.39261 -2.047285 -4.634917 1.404107 3.037657 -3.340883 -2.807996 3.492951 0.923363 -3.953292 1.018387 1.592916 -0.4543391
25.39389 -1.966829 -4.313696 1.587061 2.857157 -3.601941 -2.26694 3.80877 0.6633986 -4.567626 1.584764 1.410314 -0.6746552
25.39423 -2.023559 -4.097264 1.667679 2.91893 -3.657711 -1.993285 4.254144 0.4160393 -4.565757 1.824764 1.220285 -0.7850978
25.39533 -2.21744 -4.151314 1.623182 2.763611 -3.812792 -2.001571 4.384324 0.01321027 -4.398407 2.07985 1.124257 -0.8585684
25.39554 -2.612154 -4.512983 1.584056 2.223543 -4.355544 -1.856331 3.957136 -0.5604174 -3.890465 2.293675 1.236684 -1.0146
25.39308 -2.520367 -4.397037 2.256149 2.091863 -4.63843 -1.046045 3.774941 -1.143284 -2.75447 2.534642 0.5635763 -1.239542
25.3965 -2.451873 -4.109932 2.874861 2.20728 -4.254683 0.2644677 4.00146 -1.432415 -1.490167 3.463402 -0.4643234 -1.317148
25.39348 -2.371119 -3.777657 2.836152 1.571334 -4.244382 0.7971967 3.746778 -2.613638 -0.9152473 4.244115 -0.9552278 -1.307944
25.39616 -2.542989 -3.77389 2.598389 1.069738 -4.357088 0.9597103 3.569531 -3.466747 -0.6653178 4.548326 -1.286707 -1.234624
25.39317 -2.477293 -3.775288 2.630917 0.5822484 -4.476471 0.7536674 3.719385 -4.367169 -0.2537828 4.012263 -1.064703 -1.128852
25.39464 -2.439779 -3.68785 3.302012 0.744005 -3.889172 1.24866 4.28452 -4.232648 0.458879 3.416548 -1.333863 -0.9995397
25.39489 -2.693073 -3.954254 3.476758 -0.09684802 -4.597503 1.402416 3.241604 -4.638814 0.5201415 2.158934 -1.208856 -0.5842211
25.39444 -2.900834 -4.006212 3.853968 -0.5424059 -4.682728 2.199766 2.314948 -4.502245 1.472192 0.8833935 -1.296323 0.0575683
25.39328 -2.766473 -3.570523 4.122112 -0.5452397 -3.808851 3.165822 1.791067 -4.126428 3.205217 0.1714048 -1.864875 0.6073523
25.39395 -2.759377 -3.313777 3.90489 -0.7153167 -3.417647 3.708959 1.201962 -4.206518 4.373943 -0.131486 -1.97702 0.8515916
25.39198 -2.873582 -3.307509 3.343107 -1.263293 -4.031776 3.546453 0.2252249 -4.759563 4.390765 -0.4121067 -1.670728 0.9300516
25.39395 -2.76613 -2.840667 3.641021 -1.139558 -3.571699 4.10246 0.2675408 -4.259459 4.589026 -0.2412382 -1.723346 0.9682031
25.39408 -3.019951 -2.992377 3.656484 -1.834124 -3.688864 3.884578 -0.3628145 -4.116153 4.041339 -0.636009 -1.536878 1.304956
25.393 -3.130494 -2.666178 4.436203 -2.069923 -2.752387 4.570864 -0.8447571 -2.426486 3.545671 -0.9476085 -1.309664 1.45786
25.39316 -3.610643 -2.879436 4.271459 -2.598714 -2.21266 4.540155 -1.901312 -1.326681 3.136247 -2.408866 -0.4492029 1.578339
25.39214 -3.168075 -2.169182 3.985322 -2.548994 -1.702035 4.670306 -2.985984 -0.06545441 3.488072 -3.423412 -0.09890725 1.54746 ]

View File

@ -5,7 +5,7 @@
from pathlib import Path from pathlib import Path
import torch import torch
from utils import read_ark_txt, read_wave from utils import get_devices, read_ark_txt, read_wave
import kaldifeat import kaldifeat
@ -13,76 +13,98 @@ cur_dir = Path(__file__).resolve().parent
def test_fbank_default(): def test_fbank_default():
print("=====test_fbank_default=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = fbank(wave) features = fbank(wave)
gt = read_ark_txt(cur_dir / "test_data/test.txt") gt = read_ark_txt(cur_dir / "test_data/test.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, rtol=1e-1)
def test_fbank_htk(): def test_fbank_htk():
print("=====test_fbank_htk=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.use_energy = True opts.use_energy = True
opts.htk_compat = True opts.htk_compat = True
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = fbank(wave) features = fbank(wave)
gt = read_ark_txt(cur_dir / "test_data/test-htk.txt") gt = read_ark_txt(cur_dir / "test_data/test-htk.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, rtol=1e-1)
def test_fbank_with_energy(): def test_fbank_with_energy():
print("=====test_fbank_with_energy=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.use_energy = True opts.use_energy = True
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = fbank(wave) features = fbank(wave)
gt = read_ark_txt(cur_dir / "test_data/test-with-energy.txt") gt = read_ark_txt(cur_dir / "test_data/test-with-energy.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, rtol=1e-1)
def test_fbank_40_bins(): def test_fbank_40_bins():
print("=====test_fbank_40_bins=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.mel_opts.num_bins = 40 opts.mel_opts.num_bins = 40
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = fbank(wave) features = fbank(wave)
gt = read_ark_txt(cur_dir / "test_data/test-40.txt") gt = read_ark_txt(cur_dir / "test_data/test-40.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, rtol=1e-1)
def test_fbank_40_bins_no_snip_edges(): def test_fbank_40_bins_no_snip_edges():
print("=====test_fbank_40_bins_no_snip_edges=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.mel_opts.num_bins = 40 opts.mel_opts.num_bins = 40
opts.frame_opts.snip_edges = False opts.frame_opts.snip_edges = False
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = fbank(wave) features = fbank(wave)
gt = read_ark_txt(cur_dir / "test_data/test-40-no-snip-edges.txt") gt = read_ark_txt(cur_dir / "test_data/test-40-no-snip-edges.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, rtol=1e-1)
def test_fbank_chunk(): def test_fbank_chunk():
print("=====test_fbank_chunk=====")
filename = cur_dir / "test_data/test-1hour.wav" filename = cur_dir / "test_data/test-1hour.wav"
if filename.is_file() is False: if filename.is_file() is False:
print( print(
@ -91,13 +113,16 @@ def test_fbank_chunk():
) )
return return
for device in get_devices():
print("device", device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.mel_opts.num_bins = 40 opts.mel_opts.num_bins = 40
opts.frame_opts.snip_edges = False opts.frame_opts.snip_edges = False
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)
wave = read_wave(filename) wave = read_wave(filename).to(device)
# You can use # You can use
# #
@ -111,10 +136,14 @@ def test_fbank_chunk():
def test_fbank_batch(): def test_fbank_batch():
wave0 = read_wave(cur_dir / "test_data/test.wav") print("=====test_fbank_chunk=====")
wave1 = read_wave(cur_dir / "test_data/test2.wav") for device in get_devices():
print("device", device)
wave0 = read_wave(cur_dir / "test_data/test.wav").to(device)
wave1 = read_wave(cur_dir / "test_data/test2.wav").to(device)
opts = kaldifeat.FbankOptions() opts = kaldifeat.FbankOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
fbank = kaldifeat.Fbank(opts) fbank = kaldifeat.Fbank(opts)

View File

@ -5,7 +5,7 @@
from pathlib import Path from pathlib import Path
import torch import torch
from utils import read_ark_txt, read_wave from utils import get_devices, read_ark_txt, read_wave
import kaldifeat import kaldifeat
@ -13,29 +13,37 @@ cur_dir = Path(__file__).resolve().parent
def test_mfcc_default(): def test_mfcc_default():
print("=====test_mfcc_default=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.MfccOptions() opts = kaldifeat.MfccOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
mfcc = kaldifeat.Mfcc(opts) mfcc = kaldifeat.Mfcc(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = mfcc(wave) features = mfcc(wave)
gt = read_ark_txt(cur_dir / "test_data/test-mfcc.txt") gt = read_ark_txt(cur_dir / "test_data/test-mfcc.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, atol=1e-1)
def test_mfcc_no_snip_edges(): def test_mfcc_no_snip_edges():
print("=====test_mfcc_no_snip_edges=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.MfccOptions() opts = kaldifeat.MfccOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.frame_opts.snip_edges = False opts.frame_opts.snip_edges = False
mfcc = kaldifeat.Mfcc(opts) mfcc = kaldifeat.Mfcc(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename) wave = read_wave(filename).to(device)
features = mfcc(wave) features = mfcc(wave)
gt = read_ark_txt(cur_dir / "test_data/test-mfcc-no-snip-edges.txt") gt = read_ark_txt(cur_dir / "test_data/test-mfcc-no-snip-edges.txt")
assert torch.allclose(features, gt, rtol=1e-1) assert torch.allclose(features.cpu(), gt, rtol=1e-1)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -131,12 +131,49 @@ def test_spectogram_options():
print(opts) print(opts)
def test_plp_options():
opts = kaldifeat.PlpOptions()
opts.lpc_order = 12
opts.num_ceps = 13
opts.use_energy = True
opts.energy_floor = 0.0
opts.raw_energy = True
opts.compress_factor = 0.33333
opts.cepstral_lifter = 22
opts.cepstral_scale = 1.0
opts.htk_compat = False
opts.device = torch.device("cpu")
frame_opts = opts.frame_opts
frame_opts.blackman_coeff = 0.42
frame_opts.dither = 1
frame_opts.frame_length_ms = 25
frame_opts.frame_shift_ms = 10
frame_opts.preemph_coeff = 0.97
frame_opts.remove_dc_offset = True
frame_opts.round_to_power_of_two = True
frame_opts.samp_freq = 16000
frame_opts.snip_edges = True
frame_opts.window_type = "povey"
mel_opts = opts.mel_opts
mel_opts.debug_mel = True
mel_opts.high_freq = 0
mel_opts.low_freq = 20
mel_opts.num_bins = 23
mel_opts.vtln_high = -500
mel_opts.vtln_low = 100
print(opts)
def main(): def main():
test_frame_extraction_options() test_frame_extraction_options()
test_mel_banks_options() test_mel_banks_options()
test_fbank_options() test_fbank_options()
test_mfcc_options() test_mfcc_options()
test_spectogram_options() test_spectogram_options()
test_plp_options()
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -0,0 +1,71 @@
#!/usr/bin/env python3
# Copyright 2021 Xiaomi Corporation (authors: Fangjun Kuang)
from pathlib import Path
import torch
from utils import get_devices, read_ark_txt, read_wave
import kaldifeat
cur_dir = Path(__file__).resolve().parent
def test_plp_default():
print("=====test_plp_default=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.PlpOptions()
opts.frame_opts.dither = 0
opts.device = device
plp = kaldifeat.Plp(opts)
filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename).to(device)
features = plp(wave)
gt = read_ark_txt(cur_dir / "test_data/test-plp.txt")
assert torch.allclose(features.cpu(), gt, rtol=1e-1)
def test_plp_no_snip_edges():
print("=====test_plp_no_snip_edges=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.PlpOptions()
opts.device = device
opts.frame_opts.dither = 0
opts.frame_opts.snip_edges = False
plp = kaldifeat.Plp(opts)
filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename).to(device)
features = plp(wave)
gt = read_ark_txt(cur_dir / "test_data/test-plp-no-snip-edges.txt")
assert torch.allclose(features.cpu(), gt, atol=1e-1)
def test_plp_htk_10_ceps():
print("=====test_plp_htk_10_ceps=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.PlpOptions()
opts.device = device
opts.htk_compat = True
opts.num_ceps = 10
opts.frame_opts.dither = 0
plp = kaldifeat.Plp(opts)
filename = cur_dir / "test_data/test.wav"
wave = read_wave(filename).to(device)
features = plp(wave)
gt = read_ark_txt(cur_dir / "test_data/test-plp-htk-10-ceps.txt")
assert torch.allclose(features.cpu(), gt, atol=1e-1)
if __name__ == "__main__":
test_plp_default()
test_plp_no_snip_edges()
test_plp_htk_10_ceps()

View File

@ -4,8 +4,7 @@
from pathlib import Path from pathlib import Path
import torch from utils import get_devices, read_ark_txt, read_wave
from utils import read_ark_txt, read_wave
import kaldifeat import kaldifeat
@ -13,7 +12,11 @@ cur_dir = Path(__file__).resolve().parent
def test_spectrogram_default(): def test_spectrogram_default():
print("=====test_spectrogram_default=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.SpectrogramOptions() opts = kaldifeat.SpectrogramOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
spectrogram = kaldifeat.Spectrogram(opts) spectrogram = kaldifeat.Spectrogram(opts)
filename = cur_dir / "test_data/test.wav" filename = cur_dir / "test_data/test.wav"
@ -22,12 +25,16 @@ def test_spectrogram_default():
features = spectrogram(wave) features = spectrogram(wave)
gt = read_ark_txt(cur_dir / "test_data/test-spectrogram.txt") gt = read_ark_txt(cur_dir / "test_data/test-spectrogram.txt")
assert torch.allclose(features, gt, atol=1.1) # assert torch.allclose(features.cpu(), gt, atol=1.1)
print(features[1, 145:148], gt[1, 145:148]) # they are different print(features[1, 145:148], gt[1, 145:148]) # they are different
def test_spectrogram_no_snip_edges(): def test_spectrogram_no_snip_edges():
print("=====test_spectrogram_no_snip_edges=====")
for device in get_devices():
print("device", device)
opts = kaldifeat.SpectrogramOptions() opts = kaldifeat.SpectrogramOptions()
opts.device = device
opts.frame_opts.dither = 0 opts.frame_opts.dither = 0
opts.frame_opts.snip_edges = False opts.frame_opts.snip_edges = False
spectrogram = kaldifeat.Spectrogram(opts) spectrogram = kaldifeat.Spectrogram(opts)
@ -35,9 +42,11 @@ def test_spectrogram_no_snip_edges():
wave = read_wave(filename).to(opts.device) wave = read_wave(filename).to(opts.device)
features = spectrogram(wave) features = spectrogram(wave)
gt = read_ark_txt(cur_dir / "test_data/test-spectrogram-no-snip-edges.txt") gt = read_ark_txt(
cur_dir / "test_data/test-spectrogram-no-snip-edges.txt"
)
assert torch.allclose(features, gt, atol=1.5) # assert torch.allclose(features.cpu(), gt, atol=1.5)
print(features[1, 145:148], gt[1, 145:148]) # they are different print(features[1, 145:148], gt[1, 145:148]) # they are different

View File

@ -1,5 +1,7 @@
# Copyright 2021 Xiaomi Corporation (authors: Fangjun Kuang) # Copyright 2021 Xiaomi Corporation (authors: Fangjun Kuang)
from typing import List
import numpy as np import numpy as np
import soundfile as sf import soundfile as sf
import torch import torch
@ -39,3 +41,12 @@ def read_ark_txt(filename) -> torch.Tensor:
features.append(data) features.append(data)
ans = torch.tensor(features) ans = torch.tensor(features)
return ans return ans
def get_devices() -> List[torch.device]:
ans = [torch.device("cpu")]
if torch.cuda.is_available():
ans.append(torch.device("cuda", 0))
if torch.cuda.device_count() > 1:
ans.append(torch.device("cuda", 1))
return ans

126
scripts/check_style_cpplint.sh Executable file
View File

@ -0,0 +1,126 @@
#!/bin/bash
#
# Copyright 2020 Mobvoi Inc. (authors: Fangjun Kuang)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Usage:
#
# (1) To check files of the last commit
# ./scripts/check_style_cpplint.sh
#
# (2) To check changed files not committed yet
# ./scripts/check_style_cpplint.sh 1
#
# (3) To check all files in the project
# ./scripts/check_style_cpplint.sh 2
cpplint_version="1.5.4"
cur_dir=$(cd $(dirname $BASH_SOURCE) && pwd)
kaldifeat_dir=$(cd $cur_dir/.. && pwd)
build_dir=$kaldifeat_dir/build
mkdir -p $build_dir
cpplint_src=$build_dir/cpplint-${cpplint_version}/cpplint.py
if [ ! -d "$build_dir/cpplint-${cpplint_version}" ]; then
pushd $build_dir
if command -v wget &> /dev/null; then
wget https://github.com/cpplint/cpplint/archive/${cpplint_version}.tar.gz
elif command -v curl &> /dev/null; then
curl -O -SL https://github.com/cpplint/cpplint/archive/${cpplint_version}.tar.gz
else
echo "Please install wget or curl to download cpplint"
exit 1
fi
tar xf ${cpplint_version}.tar.gz
rm ${cpplint_version}.tar.gz
# cpplint will report the following error for: __host__ __device__ (
#
# Extra space before ( in function call [whitespace/parens] [4]
#
# the following patch disables the above error
sed -i "3490i\ not Search(r'__host__ __device__\\\s+\\\(', fncall) and" $cpplint_src
popd
fi
source $kaldifeat_dir/scripts/utils.sh
# return true if the given file is a c++ source file
# return false otherwise
function is_source_code_file() {
case "$1" in
*.cc|*.h|*.cu)
echo true;;
*)
echo false;;
esac
}
function check_style() {
python3 $cpplint_src $1 || abort $1
}
function check_last_commit() {
files=$(git diff HEAD^1 --name-only --diff-filter=ACDMRUXB)
echo $files
}
function check_current_dir() {
files=$(git status -s -uno --porcelain | awk '{
if (NF == 4) {
# a file has been renamed
print $NF
} else {
print $2
}}')
echo $files
}
function do_check() {
case "$1" in
1)
echo "Check changed files"
files=$(check_current_dir)
;;
2)
echo "Check all files"
files=$(find $kaldifeat_dir/kaldifeat -name "*.h" -o -name "*.cc" -o -name "*.cu")
;;
*)
echo "Check last commit"
files=$(check_last_commit)
;;
esac
for f in $files; do
need_check=$(is_source_code_file $f)
if $need_check; then
[[ -f $f ]] && check_style $f
fi
done
}
function main() {
do_check $1
ok "Great! Style check passed!"
}
cd $kaldifeat_dir
main $1

19
scripts/utils.sh Normal file
View File

@ -0,0 +1,19 @@
#!/bin/bash
default='\033[0m'
bold='\033[1m'
red='\033[31m'
green='\033[32m'
function ok() {
printf "${bold}${green}[OK]${default} $1\n"
}
function error() {
printf "${bold}${red}[FAILED]${default} $1\n"
}
function abort() {
printf "${bold}${red}[FAILED]${default} $1\n"
exit 1
}