update readme
parent fc7f1cc365
commit 66b0fe4e96

README.md

@@ -327,8 +327,8 @@ To enable FlashAttention-2 on the Windows platform, you need to install the prec
 #### Use local environment
 
 ```bash
-CUDA_VISIBLE_DEVICES=0 python src/train_web.py
-# or CUDA_VISIBLE_DEVICES=0 python -m llmtuner.webui.interface
+export CUDA_VISIBLE_DEVICES=0 # `set CUDA_VISIBLE_DEVICES=0` for Windows
+python src/train_web.py # or python -m llmtuner.webui.interface
 ```
 
 #### Use Docker
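A note on the new two-step form (not part of the diff itself): separating device selection from the launch command also makes it easy to pin other environment settings. As a minimal sketch, assuming `src/train_web.py` starts a Gradio app, Gradio reads the `GRADIO_SERVER_PORT` environment variable, so the UI port can be fixed the same way; the port value below is only a placeholder:

```bash
export CUDA_VISIBLE_DEVICES=0    # GPU selection, as in the diff above
export GRADIO_SERVER_PORT=7860   # assumption: the web UI is Gradio-based and honors this variable
python src/train_web.py
```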
@@ -370,10 +370,8 @@ Use `python src/train_bash.py -h` to display arguments description.
 
 ```bash
 CUDA_VISIBLE_DEVICES=0 API_PORT=8000 python src/api_demo.py \
-    --model_name_or_path path_to_model \
-    --adapter_name_or_path path_to_lora_adapter \
-    --template default \
-    --finetuning_type lora \
+    --model_name_or_path mistralai/Mistral-7B-Instruct-v0.2 \
+    --template mistral \
     --infer_backend vllm
 ```
 
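After launching the API demo with the command above, a quick smoke test is to call the chat completions route. This is a minimal sketch assuming `src/api_demo.py` exposes an OpenAI-compatible `/v1/chat/completions` endpoint on the port set via `API_PORT`:

```bash
# Assumption: the demo serves an OpenAI-style API at http://localhost:8000 (API_PORT above).
curl http://localhost:8000/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
        "model": "mistralai/Mistral-7B-Instruct-v0.2",
        "messages": [{"role": "user", "content": "Hello!"}]
      }'
```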
README_zh.md

@@ -327,8 +327,8 @@ pip install https://github.com/jllllll/bitsandbytes-windows-webui/releases/downl
 #### Use local environment
 
 ```bash
-CUDA_VISIBLE_DEVICES=0 python src/train_web.py
-# or CUDA_VISIBLE_DEVICES=0 python -m llmtuner.webui.interface
+export CUDA_VISIBLE_DEVICES=0 # use `set CUDA_VISIBLE_DEVICES=0` on Windows
+python src/train_web.py # or python -m llmtuner.webui.interface
 ```
 
 #### Use Docker
@@ -370,10 +370,8 @@ docker compose -f ./docker-compose.yml up -d
 
 ```bash
 CUDA_VISIBLE_DEVICES=0 API_PORT=8000 python src/api_demo.py \
-    --model_name_or_path path_to_model \
-    --adapter_name_or_path path_to_lora_adapter \
-    --template default \
-    --finetuning_type lora \
+    --model_name_or_path mistralai/Mistral-7B-Instruct-v0.2 \
+    --template mistral \
     --infer_backend vllm
 ```