use explicit relative imports for alimeeting

This commit is contained in:
shaynemei 2022-08-01 21:39:09 -07:00
parent 6df69603a8
commit 8714df792a
6 changed files with 14 additions and 15 deletions

View File

@ -39,7 +39,7 @@ from typing import Dict, List
import k2
import torch
from prepare_lang import (
from .prepare_lang import (
Lexicon,
add_disambig_symbols,
add_self_loops,

View File

@ -22,7 +22,7 @@ import os
import tempfile
import k2
from prepare_lang import (
from .prepare_lang import (
add_disambig_symbols,
generate_id_map,
get_phones,

View File

@ -59,8 +59,8 @@ from typing import Dict, List, Optional, Tuple
import k2
import torch
import torch.nn as nn
from asr_datamodule import AlimeetingAsrDataModule
from beam_search import (
from .asr_datamodule import AlimeetingAsrDataModule
from .beam_search import (
beam_search,
fast_beam_search_one_best,
greedy_search,
@ -68,7 +68,7 @@ from beam_search import (
modified_beam_search,
)
from lhotse.cut import Cut
from train import get_params, get_transducer_model
from .train import get_params, get_transducer_model
from icefall.checkpoint import (
average_checkpoints,

View File

@ -46,7 +46,7 @@ import logging
from pathlib import Path
import torch
from train import get_params, get_transducer_model
from .train import get_params, get_transducer_model
from icefall.checkpoint import average_checkpoints, load_checkpoint
from icefall.lexicon import Lexicon

View File

@ -71,7 +71,7 @@ from beam_search import (
modified_beam_search,
)
from torch.nn.utils.rnn import pad_sequence
from train import get_params, get_transducer_model
from .train import get_params, get_transducer_model
from icefall.lexicon import Lexicon

View File

@ -53,19 +53,18 @@ from shutil import copyfile
from typing import Any, Dict, Optional, Tuple, Union
import k2
import optim
import torch
import torch.multiprocessing as mp
import torch.nn as nn
from asr_datamodule import AlimeetingAsrDataModule
from conformer import Conformer
from decoder import Decoder
from joiner import Joiner
from .asr_datamodule import AlimeetingAsrDataModule
from .conformer import Conformer
from .decoder import Decoder
from .joiner import Joiner
from lhotse.cut import Cut
from lhotse.dataset.sampling.base import CutSampler
from lhotse.utils import fix_random_seed
from model import Transducer
from optim import Eden, Eve
from .model import Transducer
from .optim import Eden, Eve, LRScheduler
from torch import Tensor
from torch.cuda.amp import GradScaler
from torch.nn.parallel import DistributedDataParallel as DDP
@ -82,7 +81,7 @@ from icefall.lexicon import Lexicon
from icefall.utils import AttributeDict, MetricsTracker, setup_logger, str2bool
LRSchedulerType = Union[
torch.optim.lr_scheduler._LRScheduler, optim.LRScheduler
torch.optim.lr_scheduler._LRScheduler, LRScheduler
]
os.environ["CUDA_LAUNCH_BLOCKING"] = "1"