From 66b0fe4e964ad4d882935910f5e512666c54c2b2 Mon Sep 17 00:00:00 2001
From: hiyouga
Date: Tue, 2 Apr 2024 22:17:48 +0800
Subject: [PATCH] update readme

---
 README.md    | 10 ++++------
 README_zh.md | 10 ++++------
 2 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index 6dab2c93..1b218418 100644
--- a/README.md
+++ b/README.md
@@ -327,8 +327,8 @@ To enable FlashAttention-2 on the Windows platform, you need to install the prec
 #### Use local environment
 
 ```bash
-CUDA_VISIBLE_DEVICES=0 python src/train_web.py
-# or CUDA_VISIBLE_DEVICES=0 python -m llmtuner.webui.interface
+export CUDA_VISIBLE_DEVICES=0 # `set CUDA_VISIBLE_DEVICES=0` for Windows
+python src/train_web.py # or python -m llmtuner.webui.interface
 ```
 
 #### Use Docker
@@ -370,10 +370,8 @@ Use `python src/train_bash.py -h` to display arguments description.
 
 ```bash
 CUDA_VISIBLE_DEVICES=0 API_PORT=8000 python src/api_demo.py \
-    --model_name_or_path path_to_model \
-    --adapter_name_or_path path_to_lora_adapter \
-    --template default \
-    --finetuning_type lora \
+    --model_name_or_path mistralai/Mistral-7B-Instruct-v0.2 \
+    --template mistral \
     --infer_backend vllm
 ```
 
diff --git a/README_zh.md b/README_zh.md
index c62c212c..a8390f21 100644
--- a/README_zh.md
+++ b/README_zh.md
@@ -327,8 +327,8 @@ pip install https://github.com/jllllll/bitsandbytes-windows-webui/releases/downl
 #### 使用本地环境
 
 ```bash
-CUDA_VISIBLE_DEVICES=0 python src/train_web.py
-# 或 CUDA_VISIBLE_DEVICES=0 python -m llmtuner.webui.interface
+export CUDA_VISIBLE_DEVICES=0 # Windows 使用 `set CUDA_VISIBLE_DEVICES=0`
+python src/train_web.py # 或 python -m llmtuner.webui.interface
 ```
 
 #### 使用 Docker
@@ -370,10 +370,8 @@ docker compose -f ./docker-compose.yml up -d
 
 ```bash
 CUDA_VISIBLE_DEVICES=0 API_PORT=8000 python src/api_demo.py \
-    --model_name_or_path path_to_model \
-    --adapter_name_or_path path_to_lora_adapter \
-    --template default \
-    --finetuning_type lora \
+    --model_name_or_path mistralai/Mistral-7B-Instruct-v0.2 \
+    --template mistral \
     --infer_backend vllm
 ```