Fix typos, remove unused packages, normalize comments (#1678)

Yifan Yang, 2024-07-04 14:19:45 +08:00, committed by GitHub
parent ebbd396c2b
commit cbcac23d26
10 changed files with 7 additions and 19 deletions


@@ -1,4 +1,4 @@
-# Copyright 2021 Piotr Żelasko
+# Copyright 2021 Piotr Żelasko
# Copyright 2022 Xiaomi Corporation (Author: Mingshuang Luo)
#
# See ../../../../LICENSE for clarification regarding multiple authors


@@ -133,7 +133,6 @@ from icefall.checkpoint import (
from icefall.lexicon import Lexicon
from icefall.utils import (
    AttributeDict,
-    make_pad_mask,
    setup_logger,
    store_transcripts,
    str2bool,
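
For context on the symbol dropped above: make_pad_mask builds a boolean padding mask from a batch of sequence lengths. A minimal sketch of its behavior, with the signature simplified from icefall.utils:

import torch

def make_pad_mask(lengths: torch.Tensor) -> torch.Tensor:
    # Returns a (batch, max_len) bool tensor, True at padded positions.
    max_len = int(lengths.max())
    seq_range = torch.arange(max_len, device=lengths.device)
    # Positions at or beyond each sequence's length are padding.
    return seq_range.unsqueeze(0) >= lengths.unsqueeze(1)

# Example: lengths = torch.tensor([3, 1]) yields
# [[False, False, False], [False, True, True]]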


@@ -72,7 +72,6 @@ import k2
import onnx
import torch
import torch.nn as nn
-from decoder import Decoder
from onnxruntime.quantization import QuantType, quantize_dynamic
from scaling_converter import convert_scaled_to_non_scaled
from train import add_model_arguments, get_model, get_params
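
The export script above imports quantize_dynamic from onnxruntime; a typical invocation looks roughly like the following (the file names are placeholders and the op list is an assumption, not this script's exact arguments):

from onnxruntime.quantization import QuantType, quantize_dynamic

# Hypothetical paths; the real script derives these from its CLI arguments.
quantize_dynamic(
    model_input="model.onnx",
    model_output="model.int8.onnx",
    op_types_to_quantize=["MatMul"],  # assumed: quantize weight-heavy matmuls only
    weight_type=QuantType.QInt8,      # store weights as signed 8-bit integers
)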


@@ -40,15 +40,12 @@ Usage of this script:
import argparse
import logging
import math
from typing import List, Optional
import k2
import kaldifeat
import torch
import torchaudio
from kaldifeat import FbankOptions, OnlineFbank, OnlineFeature
from torch.nn.utils.rnn import pad_sequence
def get_parser():
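
The kaldifeat imports above are the streaming feature extractor; a minimal sketch of the online fbank loop (the option values are assumptions, not necessarily this script's settings):

import torch
import kaldifeat

opts = kaldifeat.FbankOptions()
opts.device = torch.device("cpu")
opts.frame_opts.samp_freq = 16000  # assumed sampling rate
opts.mel_opts.num_bins = 80        # assumed number of mel bins

online_fbank = kaldifeat.OnlineFbank(opts)

wave = torch.randn(16000)  # stand-in for one second of 16 kHz audio
for chunk in wave.split(1600):
    # Feed audio chunk by chunk, as a streaming decoder would.
    online_fbank.accept_waveform(sampling_rate=16000, waveform=chunk)

online_fbank.input_finished()  # flush the last partial frame
frames = [online_fbank.get_frame(i) for i in range(online_fbank.num_frames_ready)]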


@@ -45,7 +45,7 @@ class Joiner(nn.Module):
Output from the encoder. Its shape is (N, T, s_range, C).
decoder_out:
Output from the decoder. Its shape is (N, T, s_range, C).
-project_input:
+project_input:
If true, apply input projections encoder_proj and decoder_proj.
If this is false, it is the user's responsibility to do this
manually.
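
The docstring above describes the project_input switch; a sketch of how a joiner along these lines typically implements it (module names follow the docstring, but this is not the file's exact code):

import torch
import torch.nn as nn

class Joiner(nn.Module):
    def __init__(self, encoder_dim: int, decoder_dim: int,
                 joiner_dim: int, vocab_size: int):
        super().__init__()
        self.encoder_proj = nn.Linear(encoder_dim, joiner_dim)
        self.decoder_proj = nn.Linear(decoder_dim, joiner_dim)
        self.output_linear = nn.Linear(joiner_dim, vocab_size)

    def forward(self, encoder_out: torch.Tensor, decoder_out: torch.Tensor,
                project_input: bool = True) -> torch.Tensor:
        if project_input:
            # Project both streams down to the joiner dimension here.
            logit = self.encoder_proj(encoder_out) + self.decoder_proj(decoder_out)
        else:
            # The caller has already applied encoder_proj and decoder_proj.
            logit = encoder_out + decoder_out
        return self.output_linear(torch.tanh(logit))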


@@ -82,8 +82,6 @@ import logging
import torch
from onnx_pretrained import OnnxModel
-from icefall import is_module_available
def get_parser():
    parser = argparse.ArgumentParser(


@@ -1,4 +1,4 @@
-# Copyright 2022 Xiaomi Corp. (authors: Daniel Povey)
+# Copyright 2022 Xiaomi Corp. (authors: Daniel Povey)
#
# See ../LICENSE for clarification regarding multiple authors
#
@@ -22,7 +22,7 @@ from typing import Dict, List, Optional, Tuple, Union
import torch
from lhotse.utils import fix_random_seed
-from torch import Tensor, nn
+from torch import Tensor
from torch.optim import Optimizer


@@ -126,8 +126,6 @@ from export import num_tokens
from torch.nn.utils.rnn import pad_sequence
from train import add_model_arguments, get_model, get_params
-from icefall.utils import make_pad_mask
def get_parser():
    parser = argparse.ArgumentParser(


@@ -1,4 +1,5 @@
-# Copyright 2022-2023 Xiaomi Corp. (authors: Fangjun Kuang, Zengwei Yao)
+# Copyright 2022-2023 Xiaomi Corp. (authors: Fangjun Kuang,
+#                                            Zengwei Yao)
#
# See ../../../../LICENSE for clarification regarding multiple authors
#
@@ -22,7 +23,7 @@ BasicNorm is replaced by a module with `exp` removed.
"""
import copy
-from typing import List, Tuple
+from typing import List
import torch
import torch.nn as nn
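
The docstring above notes that BasicNorm is swapped for a module with exp removed; a rough sketch of that kind of inference-time rewrite (the class below is illustrative, not the repo's exact implementation):

import torch
import torch.nn as nn

class BasicNormNoExp(nn.Module):
    # Stand-in for BasicNorm with the learned log-space eps baked in as exp(eps).
    def __init__(self, eps_exp: float, channel_dim: int = -1):
        super().__init__()
        self.channel_dim = channel_dim
        self.register_buffer("eps_exp", torch.tensor(eps_exp))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Same normalization as BasicNorm, but without calling exp() at runtime.
        scales = (
            torch.mean(x ** 2, dim=self.channel_dim, keepdim=True) + self.eps_exp
        ) ** -0.5
        return x * scales

def convert_basic_norm(norm: nn.Module) -> BasicNormNoExp:
    # Assumes 'norm' is a BasicNorm holding a log-space 'eps' parameter.
    return BasicNormNoExp(norm.eps.exp().item(), norm.channel_dim)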


@@ -512,10 +512,6 @@ def get_params() -> AttributeDict:
- subsampling_factor: The subsampling factor for the model.
- encoder_dim: Hidden dim for multi-head attention model.
- num_decoder_layers: Number of decoder layer of transformer decoder.
- warm_step: The warmup period that dictates the decay of the
scale on "simple" (un-pruned) loss.
"""