import os
import re
from setuptools import find_packages, setup
def get_version():
    """Read the package version string out of ``src/llmtuner/__init__.py``.

    Returns the value assigned to ``__version__``; raises ``ValueError``
    (via tuple unpacking) if zero or multiple assignments are found.
    """
    init_path = os.path.join("src", "llmtuner", "__init__.py")
    with open(init_path, "r", encoding="utf-8") as init_file:
        source = init_file.read()
    # Same pattern as before, with "__version__" inlined instead of
    # injected through str.format.
    version_pattern = r"__version__\W*=\W*\"([^\"]+)\""
    (version,) = re.findall(version_pattern, source)
    return version
def get_requires():
    """Parse ``requirements.txt`` into a list of install requirements.

    Returns one requirement specifier per non-empty, non-comment line.

    Fix over the original: comments are now filtered *after* stripping,
    so indented ``# ...`` lines are excluded, and interior blank lines
    no longer yield empty-string entries (which setuptools rejects).
    """
    with open("requirements.txt", "r", encoding="utf-8") as f:
        file_content = f.read()
    lines = [line.strip() for line in file_content.strip().split("\n")]
    return [line for line in lines if line and not line.startswith("#")]
# Optional dependency groups, installable as e.g. ``pip install llmtuner[vllm]``.
extra_require = {
    # Training backends / optimizers
    "deepspeed": ["deepspeed>=0.10.0"],
    "metrics": ["nltk", "jieba", "rouge-chinese"],
    "unsloth": ["torch==2.2.0", "unsloth[cu121-ampere-torch220]"],
    "galore": ["galore-torch"],
    "badam": ["torch>=2.1.0"],
    # Inference engines
    "vllm": ["vllm>=0.3.3"],
    # Quantization support
    "bitsandbytes": ["bitsandbytes>=0.39.0"],
    "gptq": ["optimum>=1.16.0", "auto-gptq>=0.5.0"],
    "awq": ["autoawq"],
    "aqlm": ["aqlm[gpu]>=1.1.0"],
    # Model-specific and misc extras
    "qwen": ["tiktoken", "transformers_stream_generator"],
    "modelscope": ["modelscope"],
    "quality": ["ruff"],
}
def main():
    """Configure and invoke ``setuptools.setup`` for the llmtuner package.

    Side effects only: reads README.md, requirements.txt, and the package
    ``__init__.py`` from the current working directory.
    """
    # Fix over the original: read the long description with a context
    # manager so the file handle is closed deterministically (the original
    # used a bare open(...).read() and leaked the handle to the GC).
    with open("README.md", "r", encoding="utf-8") as readme:
        long_description = readme.read()

    setup(
        name="llmtuner",
        version=get_version(),
        author="hiyouga",
        # Kept split into adjacent literals as in the original,
        # presumably to deter address scraping.
        author_email="hiyouga" "@" "buaa.edu.cn",
        description="Easy-to-use LLM fine-tuning framework",
        long_description=long_description,
        long_description_content_type="text/markdown",
        keywords=["LLaMA", "BLOOM", "Falcon", "LLM", "ChatGPT", "transformer", "pytorch", "deep learning"],
        license="Apache 2.0 License",
        url="https://github.com/hiyouga/LLaMA-Factory",
        package_dir={"": "src"},
        packages=find_packages("src"),
        python_requires=">=3.8.0",
        install_requires=get_requires(),
        extras_require=extra_require,
        classifiers=[
            "Development Status :: 4 - Beta",
            "Intended Audience :: Developers",
            "Intended Audience :: Education",
            "Intended Audience :: Science/Research",
            "License :: OSI Approved :: Apache Software License",
            "Operating System :: OS Independent",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.8",
            "Programming Language :: Python :: 3.9",
            "Programming Language :: Python :: 3.10",
            "Programming Language :: Python :: 3.11",
            "Topic :: Scientific/Engineering :: Artificial Intelligence",
        ],
    )


if __name__ == "__main__":
    main()