qwen train: lora

This commit is contained in:
a.hediehloo 2025-11-20 06:48:29 +00:00
parent df41e7686b
commit 4fb3504b33
2 changed files with 32 additions and 3 deletions

View File

@ -2,15 +2,20 @@
nproc_per_node=1 nproc_per_node=1
INFONCE_USE_BATCH=False \
CUDA_VISIBLE_DEVICES=0 \ CUDA_VISIBLE_DEVICES=0 \
NPROC_PER_NODE=$nproc_per_node \ NPROC_PER_NODE=$nproc_per_node \
swift sft \ swift sft \
--model $(pwd)/../../data/models/Qwen3-Embedding-0.6B/model \ --model $(pwd)/../../data/models/Qwen3-Embedding-0.6B/model \
--task_type embedding \ --task_type embedding \
--model_type qwen3_emb \ --model_type qwen3_emb \
--train_type full \ --train_type lora \
--lora_rank 8 \
--lora_alpha 16 \
--target_modules all-linear \
--dataset my_local_dataset \ --dataset my_local_dataset \
--custom_register_path /home/hediehloo/codes/embedding/embedding_model/data/dataset/my_dataset_register.py \ --custom_register_path $(pwd)/../../data/dataset/my_dataset_register.py \
--split_dataset_ratio 0.005 \ --split_dataset_ratio 0.005 \
--eval_strategy steps \ --eval_strategy steps \
--output_dir output \ --output_dir output \

24
train/qwen/merge_model.py Normal file
View File

@ -0,0 +1,24 @@
import json
import numpy as np
import os
from peft import PeftModel
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
def merge(base_model_path, peft_model_path, save_path):
    """Fold LoRA adapter weights into the base model and persist the result.

    Loads the base checkpoint in bfloat16, attaches the PEFT adapter from
    peft_model_path, merges the adapter into the base weights
    (merge_and_unload), and writes the merged model to save_path.
    NOTE(review): the tokenizer is not saved alongside the merged weights;
    downstream loaders may need it copied separately — confirm.
    """
    model = AutoModelForCausalLM.from_pretrained(base_model_path, torch_dtype="bfloat16")
    merged = PeftModel.from_pretrained(model, peft_model_path).merge_and_unload()
    merged.save_pretrained(save_path)
def main():
    """Merge the trained LoRA checkpoint into the Qwen3 embedding base model.

    All paths are resolved relative to this file's directory so the script
    works no matter the current working directory.

    Bug fixed: os.path.dirname(__file__) returns "" when the script is run
    from its own directory, so the original string concatenation
    (file_path + "/../../...") produced an absolute path rooted at "/".
    os.path.abspath guards against the empty-dirname case, and
    os.path.join builds the paths portably.
    """
    file_path = os.path.abspath(os.path.dirname(__file__))
    base_model_path = os.path.join(
        file_path, "..", "..", "data", "models", "Qwen3-Embedding-0.6B", "model"
    )
    # Hard-coded training run id; update when merging a different checkpoint.
    run_dir = os.path.join(file_path, "output", "v0-20251118-115015")
    peft_model_path = os.path.join(run_dir, "checkpoint-3434")
    save_path = os.path.join(run_dir, "merged_checkpoint-3434")
    merge(base_model_path, peft_model_path, save_path)


if __name__ == "__main__":
    main()