fix export

hiyouga 2024-03-15 15:06:30 +08:00
parent 6ebde4f23e
commit 6bc2c23b6d
1 changed file with 2 additions and 1 deletion


@@ -60,8 +60,9 @@ def export_model(args: Optional[Dict[str, Any]] = None):
     if getattr(model, "quantization_method", None) is None:  # cannot convert dtype of a quantized model
         output_dtype = getattr(model.config, "torch_dtype", torch.float16)
-        model = model.to(output_dtype)
         setattr(model.config, "torch_dtype", output_dtype)
+        for param in model.parameters():
+            param.data = param.data.to(output_dtype)
     model.save_pretrained(
         save_directory=model_args.export_dir,
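
For context, a minimal standalone sketch of the pattern the added lines use: record the output dtype on the model config and cast each parameter tensor individually before save_pretrained, instead of calling model.to() on the whole module (the line this commit removes). The checkpoint name and export directory below are placeholders, and the fallback to float16 when the config carries no dtype is an added safeguard not shown in the diff.

import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("gpt2")  # placeholder checkpoint

if getattr(model, "quantization_method", None) is None:  # cannot convert dtype of a quantized model
    # Fall back to float16 when the config does not specify a dtype (assumption, not in the diff).
    output_dtype = getattr(model.config, "torch_dtype", None) or torch.float16
    setattr(model.config, "torch_dtype", output_dtype)
    # Cast each parameter tensor in place rather than calling model.to(output_dtype).
    for param in model.parameters():
        param.data = param.data.to(output_dtype)

model.save_pretrained(save_directory="./exported_model")  # placeholder export directory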