# LLaMA-Factory-310P3/results/lora_sft_template.yaml
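# usage (assuming the standard LLaMA-Factory CLI; verify against this 310P port):
#   llamafactory-cli train results/lora_sft_template.yaml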

### model
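# relative path to a local checkpoint; the output_dir below suggests Baichuan2-7B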
model_name_or_path: ../../llm/baichuan

### method
stage: sft
do_train: true
finetuning_type: lora
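# "all" attaches LoRA adapters to every linear layer; a comma-separated module list narrows the target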
lora_target: all

### dataset
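# belle_1m: the BELLE 1M-sample instruction dataset, as registered in data/dataset_info.json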
dataset: belle_1m
template: baichuan
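# tokenized sequences are truncated to 1024 tokens; at most 10000 examples are used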
cutoff_len: 1024
max_samples: 10000
overwrite_cache: true
preprocessing_num_workers: 16

### output
output_dir: ./results/lora_sft_2/Baichuan2-7B/Baichuan2_lora_sft_1_single_step500
logging_steps: 3
save_steps: 500
plot_loss: true
overwrite_output_dir: true

### train
per_device_train_batch_size: 2
gradient_accumulation_steps: 8
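# effective batch size per device: 2 samples x 8 accumulation steps = 16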
learning_rate: 1.0e-4
num_train_epochs: 10.0
lr_scheduler_type: cosine
warmup_ratio: 0.1
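# linear warmup over the first 10% of steps, then cosine decay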
fp16: true
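# ~5.7-year timeout effectively disables the distributed-communication watchdog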
ddp_timeout: 180000000
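# a positive max_steps overrides num_train_epochs, so training stops after 500 optimizer steps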
max_steps: 500
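# report token counts and tokens-per-second throughput in the training logs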
include_num_input_tokens_seen: true
include_tokens_per_second: true

### eval
val_size: 0.1
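# 10% of the data is held out; with eval_steps equal to max_steps, evaluation runs once, at step 500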
per_device_eval_batch_size: 2
eval_strategy: steps
eval_steps: 500