fix #2802
parent 96ce76cd27
commit b9f87cdc11
@@ -107,14 +107,18 @@ def init_adapter(
                 adapter_to_merge = model_args.adapter_name_or_path

             for adapter in adapter_to_merge:
-                model: "LoraModel" = PeftModel.from_pretrained(model, adapter)
+                model: "LoraModel" = PeftModel.from_pretrained(
+                    model, adapter, offload_folder=model_args.offload_folder
+                )
                 model = model.merge_and_unload()

             if len(adapter_to_merge) > 0:
                 logger.info("Merged {} adapter(s).".format(len(adapter_to_merge)))

             if adapter_to_resume is not None:  # resume lora training
-                model = PeftModel.from_pretrained(model, adapter_to_resume, is_trainable=is_trainable)
+                model = PeftModel.from_pretrained(
+                    model, adapter_to_resume, is_trainable=is_trainable, offload_folder=model_args.offload_folder
+                )

         if is_trainable and adapter_to_resume is None:  # create new lora weights while training
             if len(finetuning_args.lora_target) == 1 and finetuning_args.lora_target[0] == "all":
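For context: the change forwards offload_folder to peft's PeftModel.from_pretrained in both adapter-loading paths, so weights that Accelerate offloads to disk have a directory to land in instead of raising an error. The following is a minimal sketch of the merge path touched by this diff, not the project's init_adapter itself; "base-model", "path/to/lora-adapter", and "offload" are placeholder values.

# Minimal sketch: load a LoRA adapter with disk offload enabled, then merge it
# into the base model. Placeholder names are used for the model, adapter, and
# offload directory.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model", device_map="auto")
model = PeftModel.from_pretrained(
    base,
    "path/to/lora-adapter",
    offload_folder="offload",  # same kwarg the patch threads through from model_args
)
model = model.merge_and_unload()  # bake the adapter weights into the base model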