from local
This commit is contained in:
parent
6cca73c4b4
commit
383cb553f4
Binary file not shown.
@@ -270,35 +270,6 @@ class LoRAModule(nn.Module):
         x = x.transpose(0, 1)
         return x
 
-
-    '''
-    self.type = 'linear'
-
-    def build_adapter(embedding_dim, proj_dim, type_=self.type):
-        if type_ == 'conv':
-            return ConvolutionModule(768, 31)
-        else:
-            return nn.Sequential(
-                #nn.LayerNorm(embedding_dim),
-                nn.Linear(embedding_dim, proj_dim),
-                nn.ReLU(),
-                nn.Linear(proj_dim, embedding_dim),
-                nn.LayerNorm(embedding_dim),
-            )
-
-    self.adapter_layers = nn.ModuleList(
-        [build_adapter(embedding_dim, proj_dim, type_=self.type) for _ in range(layer_num)]
-    )
-
-    def forward(self, x, layer_id=-1):
-        x = x.transpose(0, 1)
-        residual = x
-        x = self.adapter_layers[layer_id](x)
-        x = residual + x
-        x = x.transpose(0, 1)
-
-        return x
-    '''
-
 class ResidualAdapterModule(nn.Module):
     """
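The hunk above deletes a commented-out ('''-quoted) adapter prototype from LoRAModule: a per-layer bottleneck MLP applied with a residual connection. For reference, a minimal runnable sketch of what that dead code did, assuming the (time, batch, dim) input layout implied by its transposes. The class name, the proj_dim/layer_num values in the usage line, and the type hints are illustrative, not the repository's API; the 'conv' branch depended on an external ConvolutionModule(768, 31) and is not reproduced here.

# Minimal sketch of the deleted adapter logic (assumed shapes and names).
import torch
import torch.nn as nn

class ResidualAdapterSketch(nn.Module):
    """One bottleneck MLP adapter per layer, applied with a residual add."""

    def __init__(self, embedding_dim: int, proj_dim: int, layer_num: int):
        super().__init__()
        # The deleted code's 'linear' branch: down-project, ReLU,
        # up-project, then LayerNorm over the embedding dimension.
        self.adapter_layers = nn.ModuleList(
            [
                nn.Sequential(
                    nn.Linear(embedding_dim, proj_dim),
                    nn.ReLU(),
                    nn.Linear(proj_dim, embedding_dim),
                    nn.LayerNorm(embedding_dim),
                )
                for _ in range(layer_num)
            ]
        )

    def forward(self, x: torch.Tensor, layer_id: int = -1) -> torch.Tensor:
        # x: (time, batch, dim), as the transposes in the deleted code imply.
        x = x.transpose(0, 1)     # -> (batch, time, dim)
        residual = x
        x = self.adapter_layers[layer_id](x)
        x = residual + x          # residual connection around the adapter
        return x.transpose(0, 1)  # back to (time, batch, dim)

# Usage (hypothetical sizes): output shape matches the input shape.
adapter = ResidualAdapterSketch(embedding_dim=768, proj_dim=256, layer_num=12)
y = adapter(torch.randn(100, 4, 768), layer_id=3)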