use explicit relative imports

shaynemei 2022-08-01 20:46:10 -07:00
parent e7b5a8ecbb
commit 9149a92dd8
14 changed files with 32 additions and 27 deletions
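
For context, the commit replaces implicit, sys.path-dependent intra-package imports with explicit relative imports across the recipe files. Below is a minimal sketch of the pattern, assuming a hypothetical package directory named `recipe` (the real recipe paths are collapsed in this diff view; `Conformer` is one of the symbols actually touched in the hunks that follow):

# decode.py -- a module inside a hypothetical package named `recipe`
# (illustrative only; the real directory names are not shown in this view).

# Implicit style (before): resolves only when the recipe directory itself is
# on sys.path, e.g. when the file is run as a plain script from that directory.
# from conformer import Conformer

# Explicit relative style (after): resolves against the enclosing package,
# independent of the current working directory or sys.path ordering.
from .conformer import Conformer

# Caveat: explicit relative imports require the module to be executed as part
# of a package, e.g. `python -m recipe.decode`, not `python recipe/decode.py`.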

View File

@@ -21,7 +21,8 @@ from typing import Optional, Tuple, Union
 import torch
 from torch import Tensor, nn
-from transformer import Supervisions, Transformer, encoder_padding_mask
+from .transformer import Supervisions, Transformer, encoder_padding_mask
 class Conformer(Transformer):

View File

@@ -27,9 +27,6 @@ import k2
 import sentencepiece as spm
 import torch
 import torch.nn as nn
-from asr_datamodule import GigaSpeechAsrDataModule
-from conformer import Conformer
-from gigaspeech_scoring import asr_text_post_processing
 from icefall.bpe_graph_compiler import BpeCtcTrainingGraphCompiler
 from icefall.checkpoint import average_checkpoints, load_checkpoint
@@ -52,6 +49,10 @@ from icefall.utils import (
     write_error_stats,
 )
+from .asr_datamodule import GigaSpeechAsrDataModule
+from .conformer import Conformer
+from .gigaspeech_scoring import asr_text_post_processing
 def get_parser():
     parser = argparse.ArgumentParser(

View File

@@ -34,7 +34,6 @@ from torch import Tensor
 from torch.nn.parallel import DistributedDataParallel as DDP
 from torch.nn.utils import clip_grad_norm_
 from torch.utils.tensorboard import SummaryWriter
-from transformer import Noam
 from icefall.bpe_graph_compiler import BpeCtcTrainingGraphCompiler
 from icefall.checkpoint import load_checkpoint
@@ -50,6 +49,8 @@ from icefall.utils import (
     str2bool,
 )
+from .transformer import Noam
 def get_parser():
     parser = argparse.ArgumentParser(

View File

@@ -19,10 +19,11 @@ from typing import Dict, List, Optional, Tuple, Union
 import torch
 import torch.nn as nn
-from label_smoothing import LabelSmoothingLoss
-from subsampling import Conv2dSubsampling, VggSubsampling
 from torch.nn.utils.rnn import pad_sequence
+from .label_smoothing import LabelSmoothingLoss
+from .subsampling import Conv2dSubsampling, VggSubsampling
 # Note: TorchScript requires Dict/List/etc. to be fully typed.
 Supervisions = Dict[str, torch.Tensor]

View File

@@ -66,16 +66,14 @@ import k2
 import sentencepiece as spm
 import torch
 import torch.nn as nn
-from asr_datamodule import GigaSpeechAsrDataModule
-from beam_search import (
+from .asr_datamodule import GigaSpeechAsrDataModule
+from .beam_search import (
     beam_search,
     fast_beam_search_one_best,
     greedy_search,
     greedy_search_batch,
     modified_beam_search,
 )
-from gigaspeech_scoring import asr_text_post_processing
-from train import get_params, get_transducer_model
 from icefall.checkpoint import average_checkpoints, find_checkpoints, load_checkpoint
 from icefall.utils import (
@@ -85,6 +83,9 @@ from icefall.utils import (
     write_error_stats,
 )
+from .gigaspeech_scoring import asr_text_post_processing
+from .train import get_params, get_transducer_model
 def get_parser():
     parser = argparse.ArgumentParser(

View File

@@ -49,7 +49,7 @@ from pathlib import Path
 import sentencepiece as spm
 import torch
-from train import get_params, get_transducer_model
+from .train import get_params, get_transducer_model
 from icefall.checkpoint import average_checkpoints, find_checkpoints, load_checkpoint
 from icefall.utils import str2bool

View File

@@ -56,14 +56,14 @@ import sentencepiece as spm
 import torch
 import torch.multiprocessing as mp
 import torch.nn as nn
-from asr_datamodule import GigaSpeechAsrDataModule
-from conformer import Conformer
-from decoder import Decoder
-from joiner import Joiner
+from .asr_datamodule import GigaSpeechAsrDataModule
+from .conformer import Conformer
+from .decoder import Decoder
+from .joiner import Joiner
 from lhotse.dataset.sampling.base import CutSampler
 from lhotse.utils import fix_random_seed
-from model import Transducer
-from optim import Eden, Eve
+from .model import Transducer
+from .optim import Eden, Eve
 from torch import Tensor
 from torch.cuda.amp import GradScaler
 from torch.nn.parallel import DistributedDataParallel as DDP

View File

@@ -36,7 +36,7 @@ import argparse
 from pathlib import Path
 from typing import Dict, List
-from generate_unique_lexicon import filter_multiple_pronunications
+from .generate_unique_lexicon import filter_multiple_pronunications
 from icefall.lexicon import read_lexicon

View File

@@ -41,7 +41,7 @@ from typing import Dict, List, Tuple
 import k2
 import sentencepiece as spm
 import torch
-from prepare_lang import (
+from .prepare_lang import (
     Lexicon,
     add_disambig_symbols,
     add_self_loops,

View File

@@ -21,7 +21,7 @@ from typing import Dict, List, Optional
 import k2
 import sentencepiece as spm
 import torch
-from model import Transducer
+from .model import Transducer
 from icefall.decode import Nbest, one_best_decoding
 from icefall.utils import add_eos, add_sos, get_texts

View File

@@ -21,8 +21,8 @@ import warnings
 from typing import List, Optional, Tuple
 import torch
-from encoder_interface import EncoderInterface
-from scaling import (
+from .encoder_interface import EncoderInterface
+from .scaling import (
     ActivationBalancer,
     BasicNorm,
     DoubleSwish,

View File

@@ -17,7 +17,7 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-from scaling import ScaledConv1d, ScaledEmbedding
+from .scaling import ScaledConv1d, ScaledEmbedding
 class Decoder(nn.Module):

View File

@@ -16,7 +16,7 @@
 import torch
 import torch.nn as nn
-from scaling import ScaledLinear
+from .scaling import ScaledLinear
 class Joiner(nn.Module):

View File

@@ -18,8 +18,8 @@
 import k2
 import torch
 import torch.nn as nn
-from encoder_interface import EncoderInterface
-from scaling import ScaledLinear
+from .encoder_interface import EncoderInterface
+from .scaling import ScaledLinear
 from icefall.utils import add_sos