From ffde762b9d9c861fbbd77b0d6189185d47021408 Mon Sep 17 00:00:00 2001
From: dohe0342
Date: Thu, 2 Feb 2023 13:57:37 +0900
Subject: [PATCH] from local

---
 .../ASR/conformer_ctc/.transformer.py.swp    | Bin 69632 -> 69632 bytes
 egs/aishell/ASR/conformer_ctc/transformer.py |   2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/egs/aishell/ASR/conformer_ctc/.transformer.py.swp b/egs/aishell/ASR/conformer_ctc/.transformer.py.swp
index cc9581a6e323f3e9df6f60401e861006a646e93f..08932d237afdd392433d8d43e70e54a469b06b8e 100644
GIT binary patch
delta 72
zcmZozz|ydQMKsAE%+puFQqO<^2m}}yRyy5I4&EsGUY@m!n}I=SGN(d0qtNC?g^ApP
bX+`GE`9%te#haCN?mh+pz8@9A

diff --git a/egs/aishell/ASR/conformer_ctc/transformer.py b/egs/aishell/ASR/conformer_ctc/transformer.py
index 1218c350c..b82cc486f 100644
--- a/egs/aishell/ASR/conformer_ctc/transformer.py
+++ b/egs/aishell/ASR/conformer_ctc/transformer.py
@@ -24,7 +24,7 @@ from label_smoothing import LabelSmoothingLoss
 from subsampling import Conv2dSubsampling, VggSubsampling
 from torch.nn.utils.rnn import pad_sequence
 from torch.nn.modules import Module
-import torch.tensor as Tensor
+from torch import Tensor
 
 # Note: TorchScript requires Dict/List/etc. to be fully typed.
 Supervisions = Dict[str, torch.Tensor]
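
Note on the one-line change above (not part of the patch): `import torch.tensor as Tensor` binds the deprecated `torch.tensor` module rather than the `Tensor` class, and recent PyTorch releases no longer ship that module, so the old import breaks. `from torch import Tensor` binds the class itself, which is what type annotations such as `Supervisions = Dict[str, torch.Tensor]` and TorchScript expect. A minimal sketch follows; the `frame_lengths` helper and the `"num_frames"` key are illustrative assumptions, not code from transformer.py.

    from typing import Dict

    import torch
    from torch import Tensor  # the Tensor class, usable in annotations


    def frame_lengths(supervisions: Dict[str, Tensor]) -> Tensor:
        # Illustrative helper: return one tensor-valued field of a supervision dict.
        return supervisions["num_frames"]


    # Scripting succeeds because the signature is fully typed (Dict[str, Tensor] -> Tensor).
    scripted = torch.jit.script(frame_lengths)
    print(scripted({"num_frames": torch.tensor([100, 80, 120])}))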