fix Llama lora merge crash
commit 51349ea1cc (parent c1e2c4ea45)
@@ -71,7 +71,7 @@ def export_model(args: Optional[Dict[str, Any]] = None):
         (config.top_p is not None and config.top_p != 1.0) or
         (config.typical_p is not None and config.typical_p != 1.0)
     ):
-        config.do_sample = False
+        config.do_sample = True
 
     model.save_pretrained(
         save_directory=model_args.export_dir,
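For context (a sketch, not part of the commit): the one-line flip matters because Llama checkpoints ship a generation config with non-default sampling parameters, and recent transformers releases refuse to save a config in which those parameters are set while do_sample is False. With the old code forcing do_sample = False, the export crashed inside model.save_pretrained(); forcing it to True keeps the config self-consistent. The snippet below assumes a recent transformers release and uses illustrative values (0.6 / 0.9), not values taken from the commit.

from transformers import GenerationConfig

# Sampling parameters set while do_sample is False: validate() emits
# "this flag is only used in sample-based generation modes" warnings,
# and recent transformers versions turn those warnings into a hard error
# when the config is saved, which is what crashed the LoRA merge export.
config = GenerationConfig(do_sample=False, temperature=0.6, top_p=0.9)
config.validate()

# The commit's fix: enable sampling so the config is self-consistent,
# validates cleanly, and can be written out by save_pretrained() again.
config.do_sample = True
config.validate()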