This commit is contained in:
hiyouga 2024-03-13 12:33:45 +08:00
parent 96ce76cd27
commit b9f87cdc11
1 changed file with 6 additions and 2 deletions

View File

@@ -107,14 +107,18 @@ def init_adapter(
         adapter_to_merge = model_args.adapter_name_or_path
         for adapter in adapter_to_merge:
-            model: "LoraModel" = PeftModel.from_pretrained(model, adapter)
+            model: "LoraModel" = PeftModel.from_pretrained(
+                model, adapter, offload_folder=model_args.offload_folder
+            )
             model = model.merge_and_unload()

         if len(adapter_to_merge) > 0:
             logger.info("Merged {} adapter(s).".format(len(adapter_to_merge)))

         if adapter_to_resume is not None:  # resume lora training
-            model = PeftModel.from_pretrained(model, adapter_to_resume, is_trainable=is_trainable)
+            model = PeftModel.from_pretrained(
+                model, adapter_to_resume, is_trainable=is_trainable, offload_folder=model_args.offload_folder
+            )

     if is_trainable and adapter_to_resume is None:  # create new lora weights while training
         if len(finetuning_args.lora_target) == 1 and finetuning_args.lora_target[0] == "all":