LLaMA-Factory-Mirror/scripts/llamafy_baichuan2.py

# coding=utf-8
# Copyright 2024 the LlamaFactory team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
from collections import OrderedDict
from typing import Any, Dict

import fire
import torch
from safetensors.torch import save_file
from tqdm import tqdm
from transformers.modeling_utils import (
    SAFE_WEIGHTS_INDEX_NAME,
    SAFE_WEIGHTS_NAME,
    WEIGHTS_INDEX_NAME,
    WEIGHTS_NAME,
    shard_checkpoint,
)


CONFIG_NAME = "config.json"


def save_weight(input_dir: str, output_dir: str, shard_size: str, save_safetensors: bool):
    # Collect the Baichuan2 checkpoint, which ships as one or more PyTorch ".bin"
    # shards, into a single state dict on CPU.
    baichuan2_state_dict: Dict[str, torch.Tensor] = OrderedDict()
    for filepath in tqdm(os.listdir(input_dir), desc="Load weights"):
        if os.path.isfile(os.path.join(input_dir, filepath)) and filepath.endswith(".bin"):
            shard_weight = torch.load(os.path.join(input_dir, filepath), map_location="cpu")
            baichuan2_state_dict.update(shard_weight)
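
    # At this point the state dict holds Baichuan2-style keys; illustrative examples
    # (not an exhaustive list): "model.layers.0.self_attn.W_pack.weight", "lm_head.weight".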

    llama2_state_dict: Dict[str, torch.Tensor] = OrderedDict()
    for key, value in tqdm(baichuan2_state_dict.items(), desc="Convert format"):
        if "W_pack" in key:
            # Baichuan2 fuses the query/key/value projections into a single "W_pack"
            # matrix; split it into equal thirds along dim 0 to recover q/k/v_proj.
            proj_size = value.size(0) // 3
            llama2_state_dict[key.replace("W_pack", "q_proj")] = value[:proj_size, :]
            llama2_state_dict[key.replace("W_pack", "k_proj")] = value[proj_size : 2 * proj_size, :]
            llama2_state_dict[key.replace("W_pack", "v_proj")] = value[2 * proj_size :, :]
        elif "lm_head" in key:
            # Baichuan2's NormHead normalizes the lm_head weight at inference time;
            # bake that normalization into the stored weight so a plain LLaMA head matches it.
            llama2_state_dict[key] = torch.nn.functional.normalize(value)
        else:
            llama2_state_dict[key] = value
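
    # A minimal sketch of the W_pack split, assuming Baichuan2-7B's hidden size of
    # 4096 (the shapes are illustrative, not read from the checkpoint):
    #
    #     w_pack = torch.randn(3 * 4096, 4096)
    #     q, k, v = w_pack.split(w_pack.size(0) // 3, dim=0)
    #     assert q.shape == k.shape == v.shape == (4096, 4096)
    #
    # Likewise, torch.nn.functional.normalize row-normalizes the lm_head weight,
    # matching what NormHead would otherwise compute on the fly.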

    # NOTE: shard_checkpoint has been removed from recent transformers releases;
    # this script assumes a version that still provides it.
    weights_name = SAFE_WEIGHTS_NAME if save_safetensors else WEIGHTS_NAME
    shards, index = shard_checkpoint(llama2_state_dict, max_shard_size=shard_size, weights_name=weights_name)
    for shard_file, shard in tqdm(shards.items(), desc="Save weights"):
        if save_safetensors:
            save_file(shard, os.path.join(output_dir, shard_file), metadata={"format": "pt"})
        else:
            torch.save(shard, os.path.join(output_dir, shard_file))

    # `index` is None when the model fits in a single shard; otherwise write the
    # tensor-to-shard index file alongside the shards.
    if index is None:
        print("Model weights saved in {}".format(os.path.join(output_dir, weights_name)))
    else:
        index_name = SAFE_WEIGHTS_INDEX_NAME if save_safetensors else WEIGHTS_INDEX_NAME
        with open(os.path.join(output_dir, index_name), "w", encoding="utf-8") as f:
            json.dump(index, f, indent=2, sort_keys=True)
        print("Model weights saved in {}".format(output_dir))


def save_config(input_dir: str, output_dir: str):
    with open(os.path.join(input_dir, CONFIG_NAME), "r", encoding="utf-8") as f:
        llama2_config_dict: Dict[str, Any] = json.load(f)

    # Rewrite the config so transformers loads the checkpoint as a native LLaMA model:
    # retarget the architecture and model type, and drop the remote-code hooks.
    llama2_config_dict["architectures"] = ["LlamaForCausalLM"]
    llama2_config_dict.pop("auto_map", None)
    llama2_config_dict.pop("tokenizer_class", None)
    llama2_config_dict["model_type"] = "llama"

    with open(os.path.join(output_dir, CONFIG_NAME), "w", encoding="utf-8") as f:
        json.dump(llama2_config_dict, f, indent=2)
    print("Model config saved in {}".format(os.path.join(output_dir, CONFIG_NAME)))


def llamafy_baichuan2(
    input_dir: str,
    output_dir: str,
    shard_size: str = "2GB",
    save_safetensors: bool = True,
):
    r"""
    Converts the Baichuan2-7B model into the same format as LLaMA2-7B.
    Usage: python llamafy_baichuan2.py --input_dir input --output_dir output
    Converted model: https://huggingface.co/hiyouga/Baichuan2-7B-Base-LLaMAfied
    """
    try:
        os.makedirs(output_dir, exist_ok=False)
    except FileExistsError as e:
        raise RuntimeError("Output dir already exists: {}".format(output_dir)) from e

    save_weight(input_dir, output_dir, shard_size, save_safetensors)
    save_config(input_dir, output_dir)


if __name__ == "__main__":
    fire.Fire(llamafy_baichuan2)
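
# A hedged sketch of loading the converted checkpoint (assumes "output" is the
# --output_dir used above). Note that this script converts weights and config
# only; tokenizer files are not copied and must be provided separately.
#
#     from transformers import LlamaForCausalLM
#     model = LlamaForCausalLM.from_pretrained("output")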