forked from p04798526/LLaMA-Factory-Mirror
release v0.7.1
parent 2a67ab3925
commit 1c910079d8

setup.py | 4 ++--
setup.py
@@ -5,9 +5,9 @@ from setuptools import find_packages, setup
 
 
 def get_version():
-    with open(os.path.join("src", "llmtuner", "__init__.py"), "r", encoding="utf-8") as f:
+    with open(os.path.join("src", "llmtuner", "cli.py"), "r", encoding="utf-8") as f:
         file_content = f.read()
-        pattern = r"{0}\W*=\W*\"([^\"]+)\"".format("__version__")
+        pattern = r"{}\W*=\W*\"([^\"]+)\"".format("VERSION")
         (version,) = re.findall(pattern, file_content)
         return version
 
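The version is now scraped from the VERSION constant in src/llmtuner/cli.py instead of __version__ in the package __init__.py. A minimal standalone sketch of the same regex extraction, for reference (the helper name extract_version and the inline sample strings are illustrative only):

import re


def extract_version(file_content: str, name: str = "VERSION") -> str:
    # Match e.g. `VERSION = "0.7.1"` and capture the quoted value.
    pattern = r"{}\W*=\W*\"([^\"]+)\"".format(name)
    (version,) = re.findall(pattern, file_content)  # exactly one assignment is expected
    return version


print(extract_version('VERSION = "0.7.1"'))                               # -> 0.7.1
print(extract_version('__version__ = "0.7.1.dev0"', name="__version__"))  # -> 0.7.1.dev0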
src/llmtuner/__init__.py
@@ -1,3 +1,6 @@
 # Level: api, webui > chat, eval, train > data, model > extras, hparams
 
-__version__ = "0.7.1.dev0"
+from .cli import VERSION
+
+
+__version__ = VERSION
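The package no longer hard-codes its own version string; __version__ is re-exported from cli.py, so setup.py and the runtime package read the same constant. A quick consistency check, assuming llmtuner and its dependencies are importable in the current environment:

import llmtuner
from llmtuner.cli import VERSION

# Both names resolve to the single VERSION constant defined in cli.py.
assert llmtuner.__version__ == VERSION
print(llmtuner.__version__)  # 0.7.1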
src/llmtuner/cli.py
@@ -1,7 +1,6 @@
 import sys
 from enum import Enum, unique
 
-from . import __version__
 from .api.app import run_api
 from .chat.chat_model import run_chat
 from .eval.evaluator import run_eval
@@ -9,17 +8,34 @@ from .train.tuner import export_model, run_exp
 from .webui.interface import run_web_demo, run_web_ui
 
 
-USAGE = """
-Usage:
-    llamafactory-cli api -h: launch an API server
-    llamafactory-cli chat -h: launch a chat interface in CLI
-    llamafactory-cli eval -h: do evaluation
-    llamafactory-cli export -h: merge LoRA adapters and export model
-    llamafactory-cli train -h: do training
-    llamafactory-cli webchat -h: launch a chat interface in Web UI
-    llamafactory-cli webui: launch LlamaBoard
-    llamafactory-cli version: show version info
-"""
+USAGE = (
+    "-" * 70
+    + "\n"
+    + "| Usage:                                                             |\n"
+    + "|   llamafactory-cli api -h: launch an OpenAI-style API server       |\n"
+    + "|   llamafactory-cli chat -h: launch a chat interface in CLI         |\n"
+    + "|   llamafactory-cli eval -h: evaluate models                        |\n"
+    + "|   llamafactory-cli export -h: merge LoRA adapters and export model |\n"
+    + "|   llamafactory-cli train -h: train models                          |\n"
+    + "|   llamafactory-cli webchat -h: launch a chat interface in Web UI   |\n"
+    + "|   llamafactory-cli webui: launch LlamaBoard                        |\n"
+    + "|   llamafactory-cli version: show version info                      |\n"
+    + "-" * 70
+)
+
+VERSION = "0.7.1"
+
+WELCOME = (
+    "-" * 58
+    + "\n"
+    + "| Welcome to LLaMA Factory, version {}".format(VERSION)
+    + " " * (21 - len(VERSION))
+    + "|\n|"
+    + " " * 56
+    + "|\n"
+    + "| Project page: https://github.com/hiyouga/LLaMA-Factory |\n"
+    + "-" * 58
+)
 
 
 @unique
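The padding arithmetic keeps every row of the WELCOME banner 58 characters wide: the fixed prefix "| Welcome to LLaMA Factory, version " is 36 characters, so 36 + len(VERSION) + (21 - len(VERSION)) + 1 for the closing "|" equals 58, matching the "-" * 58 border. A small sketch that checks this, with the values copied from the hunk above:

# Verify the WELCOME banner width arithmetic (values copied from the diff).
VERSION = "0.7.1"

welcome_row = (
    "| Welcome to LLaMA Factory, version {}".format(VERSION)
    + " " * (21 - len(VERSION))
    + "|"
)
assert len(welcome_row) == 58  # same width as the "-" * 58 border
assert len("| Project page: https://github.com/hiyouga/LLaMA-Factory |") == 58
print(welcome_row)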
@@ -31,7 +47,7 @@ class Command(str, Enum):
     TRAIN = "train"
     WEBDEMO = "webchat"
     WEBUI = "webui"
-    VERSION = "version"
+    VER = "version"
     HELP = "help"
 
 
@@ -51,8 +67,8 @@ def main():
         run_web_demo()
     elif command == Command.WEBUI:
         run_web_ui()
-    elif command == Command.VERSION:
-        print("Welcome to LLaMA Factory, version {}".format(__version__))
+    elif command == Command.VER:
+        print(WELCOME)
     elif command == Command.HELP:
         print(USAGE)
     else:
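Together with the renamed enum member, this means llamafactory-cli version now prints the boxed WELCOME banner instead of the old one-line message built from __version__. A condensed, hypothetical sketch of the dispatch path (the real main() handles the full command set; the banner strings here are trimmed for illustration):

import sys
from enum import Enum, unique


@unique
class Command(str, Enum):
    VER = "version"
    HELP = "help"


VERSION = "0.7.1"
WELCOME = "Welcome to LLaMA Factory, version {}".format(VERSION)  # boxed banner in the real cli.py
USAGE = "llamafactory-cli version: show version info"             # boxed usage text in the real cli.py


def main():
    # str-mixin enum members compare equal to their string values,
    # so the raw argv token can be tested against Command members directly.
    command = sys.argv.pop(1) if len(sys.argv) > 1 else Command.HELP
    if command == Command.VER:
        print(WELCOME)
    elif command == Command.HELP:
        print(USAGE)
    else:
        raise NotImplementedError("Unknown command: {}".format(command))


if __name__ == "__main__":
    main()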