from local

dohe0342 2023-05-25 21:02:43 +09:00
parent 97ab08ed80
commit 24b2f38773
2 changed files with 4 additions and 4 deletions

@@ -1606,13 +1606,13 @@ def run_adapter(rank, world_size, args, wb=None):
     lora_modules = []
     for modules in model.modules():
         if isinstance(modules, fairseq.modules.multihead_attention.MultiheadAttention):
-            lora_modules.append(LoRAHook(modules))
-            #for module in modules.modules():
+            #lora_modules.append(LoRAHook(modules))
+            for module in modules.modules():
                 # if rank == 0: logging.info(module)
                 #print(module)
-                #if isinstance(module, torch.nn.Linear):
+                if isinstance(module, torch.nn.Linear):
                     #if rank == 0: print(module)
-                    #lora_modules.append(LoRAHook(module))
+                    lora_modules.append(LoRAHook(module))
     adapter_names = []
     adapter_param = []
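
The change switches from wrapping each fairseq MultiheadAttention as a whole to wrapping every torch.nn.Linear projection inside it with a LoRAHook. The LoRAHook class itself is not part of this diff; the following is a minimal sketch of how such a forward-hook-based wrapper around a frozen nn.Linear could look, with hypothetical rank and alpha parameters and an assumed additive low-rank update. The repository's actual implementation may differ.

    # Minimal sketch only; the real LoRAHook used in this repository is not shown in the diff.
    import torch
    import torch.nn as nn

    class LoRAHook(nn.Module):
        """Adds a trainable low-rank update to a frozen nn.Linear via a forward hook.
        Assumed behavior: output + scaling * (x @ A^T @ B^T)."""

        def __init__(self, module, rank=8, alpha=16):  # rank/alpha are assumed defaults
            super().__init__()
            assert isinstance(module, nn.Linear), "this sketch only wraps nn.Linear"
            self.scaling = alpha / rank
            # A projects the input down to `rank`, B projects back up; B starts at zero
            # so the hook is a no-op at initialization.
            self.lora_A = nn.Parameter(torch.randn(rank, module.in_features) * 0.01)
            self.lora_B = nn.Parameter(torch.zeros(module.out_features, rank))
            self.handle = module.register_forward_hook(self._hook)

        def _hook(self, module, inputs, output):
            x = inputs[0]
            # Returning a value from a forward hook replaces the wrapped module's output.
            return output + (x @ self.lora_A.T @ self.lora_B.T) * self.scaling

        def remove(self):
            self.handle.remove()

Under this reading, the new loop collects one LoRAHook per Linear projection (q/k/v/out) inside each MultiheadAttention, and the adapter_names / adapter_param lists that follow presumably gather only the hooks' low-rank parameters for the optimizer while the base model stays frozen.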