This commit is contained in:
tastelikefeet 2023-12-01 17:27:00 +08:00
parent 5aa6751e52
commit d9e52957e2
2 changed files with 8 additions and 3 deletions

View File

@@ -20,7 +20,7 @@ SUBJECTS = ["Average", "STEM", "Social Sciences", "Humanities", "Other"]
# name -> resolved model path; filled per-entry with either the HuggingFace
# id or the ModelScope id depending on USE_MODELSCOPE_HUB (see register_model_group).
SUPPORTED_MODELS = OrderedDict()
# NOTE(review): nothing in view populates this — presumably ModelScope-only
# entries; verify against the rest of the module.
MODELSCOPE_MODELS = OrderedDict()
# name -> list of ALL official hub paths (both hf and ms ids) for that model.
ALL_OFFICIAL_MODELS = OrderedDict()
TRAINING_STAGES = {
"Supervised Fine-Tuning": "sft",
@@ -43,12 +43,14 @@ def register_model_group(
else:
assert prefix == name.split("-")[0], "prefix should be identical."
ALL_OFFICIAL_MODELS[name] = [path] if isinstance(path, str) else list(path.values())
if not int(os.environ.get('USE_MODELSCOPE_HUB', '0')):
# If path is a string, we treat it as a huggingface model-id by default.
SUPPORTED_MODELS[name] = path["hf"] if isinstance(path, dict) else path
elif isinstance(path, dict) and "ms" in path:
# Use ModelScope modelhub
SUPPORTED_MODELS[name] = path["ms"]
print(f'Supported models add {name}/{SUPPORTED_MODELS[name]}')
if module is not None:
DEFAULT_MODULE[prefix] = module
if template is not None:

View File

@@ -11,7 +11,7 @@ from transformers.utils import (
ADAPTER_SAFE_WEIGHTS_NAME
)
from llmtuner.extras.constants import DEFAULT_MODULE, DEFAULT_TEMPLATE, SUPPORTED_MODELS, TRAINING_STAGES
from llmtuner.extras.constants import DEFAULT_MODULE, DEFAULT_TEMPLATE, SUPPORTED_MODELS, ALL_OFFICIAL_MODELS, TRAINING_STAGES
DEFAULT_CACHE_DIR = "cache"
@@ -58,7 +58,10 @@ def save_config(lang: str, model_name: Optional[str] = None, model_path: Optiona
def get_model_path(model_name: str) -> str:
    """Resolve the filesystem/hub path for *model_name*.

    Prefers a user-cached custom path, but ignores the cache when it is
    just one of the official hub ids, so the hub-aware entry in
    ``SUPPORTED_MODELS`` (hf vs. ms) decides instead.

    Returns an empty string when the model is unknown.
    """
    # NOTE: the scraped diff left the superseded early `return` (old code)
    # above the new logic, making it unreachable — removed here.
    user_config = load_config()
    cached_path = user_config["path_dict"].get(model_name, None)
    if cached_path in ALL_OFFICIAL_MODELS.get(model_name, []):
        # Cached value is an official hub id, not a user override:
        # drop it so a USE_MODELSCOPE_HUB toggle re-resolves the path.
        cached_path = None
    return cached_path or SUPPORTED_MODELS.get(model_name, "")
def get_prefix(model_name: str) -> str: