From 51349ea1ccbf3e53b408037986abd850a0963468 Mon Sep 17 00:00:00 2001
From: marko1616
Date: Sun, 24 Mar 2024 02:55:23 +0800
Subject: [PATCH] fix Llama lora merge crash

---
 src/llmtuner/train/tuner.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llmtuner/train/tuner.py b/src/llmtuner/train/tuner.py
index e9b195de..cd532f5e 100644
--- a/src/llmtuner/train/tuner.py
+++ b/src/llmtuner/train/tuner.py
@@ -71,7 +71,7 @@ def export_model(args: Optional[Dict[str, Any]] = None):
             (config.top_p is not None and config.top_p != 1.0)
             or (config.typical_p is not None and config.typical_p != 1.0)
         ):
-            config.do_sample = False
+            config.do_sample = True
 
     model.save_pretrained(
         save_directory=model_args.export_dir,
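
For context, a minimal sketch of why the export used to crash and why flipping do_sample to True avoids it. This assumes the validation behavior of recent transformers releases, where GenerationConfig.save_pretrained() refuses to serialize a config whose flags are inconsistent (do_sample=False combined with sampling values for temperature/top_p/typical_p, as in Llama's stock generation config); the directory name and parameter values below are illustrative only, not taken from the patch.

    from transformers import GenerationConfig

    # Llama-style generation config: greedy-decoding flag combined with sampling
    # parameters. Recent transformers versions warn about this inconsistency, and
    # GenerationConfig.save_pretrained() turns those warnings into a ValueError,
    # which is the crash seen when exporting a merged LoRA model.
    config = GenerationConfig(do_sample=False, temperature=0.6, top_p=0.9)

    # Mirror of the patched branch in export_model(): if any sampling parameter
    # is active, enable sampling so the saved config is self-consistent.
    if (
        (config.temperature is not None and config.temperature != 1.0)
        or (config.top_p is not None and config.top_p != 1.0)
        or (config.typical_p is not None and config.typical_p != 1.0)
    ):
        config.do_sample = True

    config.save_pretrained("exported_model")  # now saves without raising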