delete commented lines

commit d1362a5a2f
parent 0c455db55d
Author: yaozengwei
Date:   2022-06-28 20:56:53 +08:00


@@ -303,10 +303,8 @@ class ConformerEncoder(nn.Module):
         assert num_layers - 1 not in aux_layers
         self.aux_layers = set(aux_layers + [num_layers - 1])
-        # num_channels = encoder_layer.norm_final.num_channels
         self.combiner = RandomCombine(
             num_inputs=len(self.aux_layers),
-            # num_channels=num_channels,
             final_weight=0.5,
             pure_prob=0.333,
             stddev=2.0,
@@ -1080,7 +1078,6 @@ class RandomCombine(nn.Module):
     def __init__(
         self,
         num_inputs: int,
-        # num_channels: int,
         final_weight: float = 0.5,
         pure_prob: float = 0.5,
         stddev: float = 2.0,
@@ -1091,8 +1088,6 @@ class RandomCombine(nn.Module):
             The number of tensor inputs, which equals the number of layers'
             outputs that are fed into this module.  E.g. in an 18-layer neural
             net if we output layers 16, 12, 18, num_inputs would be 3.
-          num_channels:
-            The number of channels on the input, e.g. 512.
           final_weight:
             The amount of weight or probability we assign to the
             final layer when randomly choosing layers or when choosing
@@ -1123,13 +1118,6 @@ class RandomCombine(nn.Module):
         assert 0 < final_weight < 1, final_weight
         assert num_inputs >= 1
-        # self.linear = nn.ModuleList(
-        #     [
-        #         nn.Linear(num_channels, num_channels, bias=True)
-        #         for _ in range(num_inputs - 1)
-        #     ]
-        # )
         self.num_inputs = num_inputs
         self.final_weight = final_weight
         self.pure_prob = pure_prob
@@ -1143,13 +1131,6 @@ class RandomCombine(nn.Module):
             .item()
         )
-        # self._reset_parameters()
-    # def _reset_parameters(self):
-    #     for i in range(len(self.linear)):
-    #         nn.init.eye_(self.linear[i].weight)
-    #         nn.init.constant_(self.linear[i].bias, 0.0)
     def forward(self, inputs: List[Tensor]) -> Tensor:
         """Forward function.
         Args:
@@ -1171,7 +1152,6 @@ class RandomCombine(nn.Module):
         mod_inputs = []
         for i in range(num_inputs - 1):
-            # mod_inputs.append(self.linear[i](inputs[i]))
             mod_inputs.append(inputs[i])
         mod_inputs.append(inputs[num_inputs - 1])
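
For context, below is a minimal, self-contained sketch of the kind of combination RandomCombine performs after this commit: with the per-layer nn.Linear projections deleted, the raw layer outputs are mixed directly. This is not the icefall implementation; the function name random_combine, the eval-time shortcut to the final layer, and the exact logit offset are illustrative assumptions. Only the parameter names (num_inputs, final_weight, pure_prob, stddev) and their values come from the diff above.

    from typing import List

    import torch
    from torch import Tensor


    def random_combine(
        inputs: List[Tensor],
        final_weight: float = 0.5,
        pure_prob: float = 0.333,
        stddev: float = 2.0,
        training: bool = True,
    ) -> Tensor:
        """Randomly combine layer outputs; all tensors share one shape."""
        num_inputs = len(inputs)
        if not training or num_inputs == 1:
            # Assumption: at inference time only the final layer is used.
            return inputs[-1]
        if torch.rand(()).item() < pure_prob:
            # "Pure" case: a single layer is picked; the final one with
            # probability final_weight, otherwise a uniformly random
            # earlier layer.
            if torch.rand(()).item() < final_weight:
                return inputs[-1]
            return inputs[int(torch.randint(num_inputs - 1, (1,)).item())]
        # "Mixed" case: random softmax weights over all layers, with the
        # final layer's logit offset so it receives roughly final_weight
        # of the total mass on average.
        offset = torch.tensor(
            final_weight * (num_inputs - 1) / (1.0 - final_weight)
        ).log()
        logits = torch.randn(num_inputs) * stddev
        logits[-1] = logits[-1] + offset
        weights = logits.softmax(dim=0)
        stacked = torch.stack(inputs, dim=0)
        shape = (num_inputs,) + (1,) * inputs[0].dim()
        return (weights.view(shape) * stacked).sum(dim=0)


    # Example with the ConformerEncoder settings from the first hunk,
    # assuming three aux-layer outputs of shape (seq_len, batch, channels):
    layers = [torch.randn(100, 8, 512) for _ in range(3)]
    out = random_combine(layers, final_weight=0.5, pure_prob=0.333, stddev=2.0)
    assert out.shape == layers[0].shape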