mirror of
https://github.com/k2-fsa/icefall.git
synced 2025-12-11 06:55:27 +00:00
from local
This commit is contained in:
parent
2a54d49f96
commit
09701ed03a
Binary file not shown.
@ -229,6 +229,48 @@ class TransformerEncoderAdapter(TransformerEncoder):
|
||||
return x, layer_results
|
||||
|
||||
|
||||
class LoRAModule(nn.Module):
    """Per-layer residual adapters (https://arxiv.org/pdf/1909.08478.pdf).

    NOTE(review): despite the class name, this implements residual adapters,
    not LoRA: one bottleneck MLP per encoder layer, added residually to the
    features. It matches the original residual adapter except that the
    LayerNorm is applied last instead of first.

    Args:
        embedding_dim: feature dimension of the encoder outputs.
        layer_num: number of adapters (one per encoder layer).
        proj_dim: bottleneck (down-projection) dimension of each adapter.
    """

    def __init__(
        self,
        # Fixed annotations: dimensions are ints, not floats.
        embedding_dim: int = 768,
        layer_num: int = 12,
        proj_dim: int = 512,
    ) -> None:
        super().__init__()

        # Only the 'linear' adapter is active; the 'conv' branch is kept for
        # experimentation and is unreachable with the current setting.
        self.type = 'linear'

        def build_adapter(embedding_dim, proj_dim, type_=self.type):
            # One adapter: down-project -> ReLU -> up-project -> LayerNorm.
            if type_ == 'conv':
                # Was hard-coded to 768; use embedding_dim so the conv
                # adapter follows the configured feature size.
                return ConvolutionModule(embedding_dim, 31)
            else:
                return nn.Sequential(
                    nn.Linear(embedding_dim, proj_dim),
                    nn.ReLU(),
                    nn.Linear(proj_dim, embedding_dim),
                    nn.LayerNorm(embedding_dim),
                )

        self.adapter_layers = nn.ModuleList(
            [build_adapter(embedding_dim, proj_dim, type_=self.type) for _ in range(layer_num)]
        )

    def forward(self, x, layer_id: int = -1):
        """Apply adapter ``layer_id`` residually to ``x``.

        Args:
            x: input features; assumes layout (T, B, C) which is transposed
               to (B, T, C) internally — TODO confirm against callers.
            layer_id: index of the adapter to apply (default: last).

        Returns:
            Tensor of the same shape as ``x``.
        """
        x = x.transpose(0, 1)
        residual = x
        x = self.adapter_layers[layer_id](x)
        x = residual + x
        x = x.transpose(0, 1)
        return x
|
||||
|
||||
|
||||
class ResidualAdapterModule(nn.Module):
|
||||
"""
|
||||
Implements a residual adapter based on https://arxiv.org/pdf/1909.08478.pdf
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user