Mirror of https://github.com/k2-fsa/icefall.git (synced 2025-09-19 05:54:20 +00:00)

use explicit relative imports

commit 9149a92dd8 (parent e7b5a8ecbb)
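In plain terms, the change rewrites the recipes' intra-directory imports from the implicit absolute form (from transformer import ...), which resolves only when the recipe directory itself is on sys.path, to the explicit relative form (from .transformer import ...), which resolves against the package the module belongs to. A minimal sketch of the two styles, using a hypothetical package mypkg that is not part of icefall:

# mypkg/conformer.py -- hypothetical file, for illustration only.

# Implicit absolute style (the old form): "transformer" is looked up on
# sys.path, so this only works when mypkg/ itself is on the path, e.g.
# when the script is launched from inside that directory.
# from transformer import Transformer

# Explicit relative style (the new form): the leading dot tells Python to
# resolve "transformer" inside the enclosing package, independent of the
# current working directory.
from .transformer import Transformer


class Conformer(Transformer):
    """Toy subclass; the imported name is used exactly as before."""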
@@ -21,7 +21,8 @@ from typing import Optional, Tuple, Union
 
 import torch
 from torch import Tensor, nn
-from transformer import Supervisions, Transformer, encoder_padding_mask
+
+from .transformer import Supervisions, Transformer, encoder_padding_mask
 
 
 class Conformer(Transformer):
@@ -27,9 +27,6 @@ import k2
 import sentencepiece as spm
 import torch
 import torch.nn as nn
-from asr_datamodule import GigaSpeechAsrDataModule
-from conformer import Conformer
-from gigaspeech_scoring import asr_text_post_processing
 
 from icefall.bpe_graph_compiler import BpeCtcTrainingGraphCompiler
 from icefall.checkpoint import average_checkpoints, load_checkpoint
@@ -52,6 +49,10 @@ from icefall.utils import (
     write_error_stats,
 )
 
+from .asr_datamodule import GigaSpeechAsrDataModule
+from .conformer import Conformer
+from .gigaspeech_scoring import asr_text_post_processing
+
 
 def get_parser():
     parser = argparse.ArgumentParser(
@@ -34,7 +34,6 @@ from torch import Tensor
 from torch.nn.parallel import DistributedDataParallel as DDP
 from torch.nn.utils import clip_grad_norm_
 from torch.utils.tensorboard import SummaryWriter
-from transformer import Noam
 
 from icefall.bpe_graph_compiler import BpeCtcTrainingGraphCompiler
 from icefall.checkpoint import load_checkpoint
@@ -50,6 +49,8 @@ from icefall.utils import (
     str2bool,
 )
 
+from .transformer import Noam
+
 
 def get_parser():
     parser = argparse.ArgumentParser(
@@ -19,10 +19,11 @@ from typing import Dict, List, Optional, Tuple, Union
 
 import torch
 import torch.nn as nn
-from label_smoothing import LabelSmoothingLoss
-from subsampling import Conv2dSubsampling, VggSubsampling
 from torch.nn.utils.rnn import pad_sequence
 
+from .label_smoothing import LabelSmoothingLoss
+from .subsampling import Conv2dSubsampling, VggSubsampling
+
 
 # Note: TorchScript requires Dict/List/etc. to be fully typed.
 Supervisions = Dict[str, torch.Tensor]
@@ -66,16 +66,14 @@ import k2
 import sentencepiece as spm
 import torch
 import torch.nn as nn
-from asr_datamodule import GigaSpeechAsrDataModule
-from beam_search import (
+from .asr_datamodule import GigaSpeechAsrDataModule
+from .beam_search import (
     beam_search,
     fast_beam_search_one_best,
     greedy_search,
     greedy_search_batch,
     modified_beam_search,
 )
-from gigaspeech_scoring import asr_text_post_processing
-from train import get_params, get_transducer_model
 
 from icefall.checkpoint import average_checkpoints, find_checkpoints, load_checkpoint
 from icefall.utils import (
@@ -85,6 +83,9 @@ from icefall.utils import (
     write_error_stats,
 )
 
+from .gigaspeech_scoring import asr_text_post_processing
+from .train import get_params, get_transducer_model
+
 
 def get_parser():
     parser = argparse.ArgumentParser(
@@ -49,7 +49,7 @@ from pathlib import Path
 
 import sentencepiece as spm
 import torch
-from train import get_params, get_transducer_model
+from .train import get_params, get_transducer_model
 
 from icefall.checkpoint import average_checkpoints, find_checkpoints, load_checkpoint
 from icefall.utils import str2bool
@@ -56,14 +56,14 @@ import sentencepiece as spm
 import torch
 import torch.multiprocessing as mp
 import torch.nn as nn
-from asr_datamodule import GigaSpeechAsrDataModule
-from conformer import Conformer
-from decoder import Decoder
-from joiner import Joiner
+from .asr_datamodule import GigaSpeechAsrDataModule
+from .conformer import Conformer
+from .decoder import Decoder
+from .joiner import Joiner
 from lhotse.dataset.sampling.base import CutSampler
 from lhotse.utils import fix_random_seed
-from model import Transducer
-from optim import Eden, Eve
+from .model import Transducer
+from .optim import Eden, Eve
 from torch import Tensor
 from torch.cuda.amp import GradScaler
 from torch.nn.parallel import DistributedDataParallel as DDP
@@ -36,7 +36,7 @@ import argparse
 from pathlib import Path
 from typing import Dict, List
 
-from generate_unique_lexicon import filter_multiple_pronunications
+from .generate_unique_lexicon import filter_multiple_pronunications
 
 from icefall.lexicon import read_lexicon
 
@@ -41,7 +41,7 @@ from typing import Dict, List, Tuple
 import k2
 import sentencepiece as spm
 import torch
-from prepare_lang import (
+from .prepare_lang import (
     Lexicon,
     add_disambig_symbols,
     add_self_loops,
@@ -21,7 +21,7 @@ from typing import Dict, List, Optional
 import k2
 import sentencepiece as spm
 import torch
-from model import Transducer
+from .model import Transducer
 
 from icefall.decode import Nbest, one_best_decoding
 from icefall.utils import add_eos, add_sos, get_texts
@@ -21,8 +21,8 @@ import warnings
 from typing import List, Optional, Tuple
 
 import torch
-from encoder_interface import EncoderInterface
-from scaling import (
+from .encoder_interface import EncoderInterface
+from .scaling import (
     ActivationBalancer,
     BasicNorm,
     DoubleSwish,
@@ -17,7 +17,7 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-from scaling import ScaledConv1d, ScaledEmbedding
+from .scaling import ScaledConv1d, ScaledEmbedding
 
 
 class Decoder(nn.Module):
@@ -16,7 +16,7 @@
 
 import torch
 import torch.nn as nn
-from scaling import ScaledLinear
+from .scaling import ScaledLinear
 
 
 class Joiner(nn.Module):
@@ -18,8 +18,8 @@
 import k2
 import torch
 import torch.nn as nn
-from encoder_interface import EncoderInterface
-from scaling import ScaledLinear
+from .encoder_interface import EncoderInterface
+from .scaling import ScaledLinear
 
 from icefall.utils import add_sos
 