from local

This commit is contained in:
dohe0342 2023-05-25 19:38:59 +09:00
parent b6c68790b0
commit 4e702886e5
2 changed files with 13 additions and 10 deletions

View File

@@ -1607,27 +1607,30 @@ def run_adapter(rank, world_size, args, wb=None):
lora_modules = []
for modules in model.modules():
if isinstance(modules, fairseq.modules.multihead_attention.MultiheadAttention):
#lora_modules.append(LoRAHook(modules))
for module in modules.modules():
lora_modules.append(LoRAHook(modules))
#for module in modules.modules():
# if rank == 0: logging.info(module)
#print(module)
if isinstance(module, torch.nn.Linear):
#if isinstance(module, torch.nn.Linear):
#if rank == 0: print(module)
lora_modules.append(LoRAHook(module))
#lora_modules.append(LoRAHook(module))
adapter_names = []
adapter_param = []
'''
for i, lora in enumerate(lora_modules):
for n, p in lora.lora.named_parameters():
new_n = str(i) + n
adapter_names.append(new_n)
adapter_param.append(p)
#for n, p in model.named_parameters():
#if 'joiner' in n or 'simple' in n or 'ctc' in n:
# p.requires_grad = True
#else:
# p.requires_grad = False
'''
for n, p in model.named_parameters():
if 'joiner' in n or 'simple' in n or 'ctc' in n:
adapter_names.append(n)
adapter_param.append(p)
p.requires_grad = True
else:
p.requires_grad = False
#for lora in lora_modules:
# print(lora.lora.state_dict())