From 5a13b3baa63225e7f79e024610722de0f87e0acc Mon Sep 17 00:00:00 2001
From: hiyouga <467089858@qq.com>
Date: Tue, 4 Jun 2024 00:31:10 +0800
Subject: [PATCH] tiny fix

---
 examples/extras/fsdp_qlora/single_node.sh | 4 ----
 scripts/llama_pro.py                      | 8 ++++----
 src/llamafactory/data/loader.py           | 2 +-
 3 files changed, 5 insertions(+), 9 deletions(-)

diff --git a/examples/extras/fsdp_qlora/single_node.sh b/examples/extras/fsdp_qlora/single_node.sh
index 54ec2bd2..fac8cdee 100644
--- a/examples/extras/fsdp_qlora/single_node.sh
+++ b/examples/extras/fsdp_qlora/single_node.sh
@@ -1,10 +1,6 @@
 #!/bin/bash
 # DO NOT use GPTQ/AWQ model in FSDP+QLoRA
 
-pip install "transformers>=4.39.1"
-pip install "accelerate>=0.28.0"
-pip install "bitsandbytes>=0.43.0"
-
 CUDA_VISIBLE_DEVICES=0,1 accelerate launch \
     --config_file examples/accelerate/fsdp_config.yaml \
     src/train.py examples/extras/fsdp_qlora/llama3_lora_sft.yaml
diff --git a/scripts/llama_pro.py b/scripts/llama_pro.py
index 997b3496..727998ae 100644
--- a/scripts/llama_pro.py
+++ b/scripts/llama_pro.py
@@ -104,10 +104,10 @@ def block_expansion(
 
     print("Model weights saved in {}".format(output_dir))
     print("Fine-tune this model with:")
-    print(" --model_name_or_path {} \\".format(output_dir))
-    print(" --finetuning_type freeze \\")
-    print(" --freeze_trainable_layers {} \\".format(num_expand))
-    print(" --use_llama_pro")
+    print("model_name_or_path: {}".format(output_dir))
+    print("finetuning_type: freeze")
+    print("freeze_trainable_layers: {}".format(num_expand))
+    print("use_llama_pro: true")
 
 
 if __name__ == "__main__":
diff --git a/src/llamafactory/data/loader.py b/src/llamafactory/data/loader.py
index 4d0503c3..7d013d27 100644
--- a/src/llamafactory/data/loader.py
+++ b/src/llamafactory/data/loader.py
@@ -179,7 +179,7 @@ def get_dataset(
         if training_args.should_save:
             dataset.save_to_disk(data_args.tokenized_path)
             logger.info("Tokenized dataset saved at {}.".format(data_args.tokenized_path))
-            logger.info("Please restart the training with `--tokenized_path {}`.".format(data_args.tokenized_path))
+            logger.info("Please restart the training with `tokenized_path: {}`.".format(data_args.tokenized_path))
 
         sys.exit(0)