from local

This commit is contained in:
dohe0342 2023-04-11 15:55:55 +09:00
parent 62d4c24d69
commit 0029ababe9
3 changed files with 3 additions and 3 deletions

View File

@@ -1579,6 +1579,7 @@ def run_adapter(rank, world_size, args, wb=None):
logging.info("Using DDP")
model = DDP(model, device_ids=[rank], find_unused_parameters=True)
'''
adapter_names = []
adapter_param = []
for n, p in model.named_parameters():
@@ -1593,8 +1594,6 @@ def run_adapter(rank, world_size, args, wb=None):
for n, p in model.named_parameters():
p.requires_grad = False
prompt = torch.nn.Parameter(torch.randn(50, 512)).to(device)
optimizer_adapter = ScaledAdam(
adapter_param,
lr=params.adapter_lr,
@@ -1602,13 +1601,14 @@ def run_adapter(rank, world_size, args, wb=None):
parameters_names=[adapter_names],
)
'''
prompt = torch.nn.Parameter(torch.randn(50, 512)).to(device)
optimizer_adapter = ScaledAdam(
[prompt],
lr=params.adapter_lr,
clipping_scale=5.0,
parameters_names=['prompt'],
)
'''
scheduler_adapter = Eden(optimizer_adapter, 10000, 7) #params.lr_batche, params.lr_epochs)
optimizer, scheduler = optimizer_adapter, scheduler_adapter