---
# Docker Compose definition for the LLaMA-Factory CUDA container.
# Builds from the repo root (context: ../..) using the CUDA Dockerfile,
# mounts model caches and data/output dirs, and reserves all host GPUs.
services:
  llamafactory:
    build:
      dockerfile: ./docker/docker-cuda/Dockerfile
      # Build context is the repository root, two levels up from this file.
      context: ../..
      args:
        # Compose build args are passed to the Dockerfile as strings;
        # quote boolean-looking values so YAML does not retype them.
        INSTALL_BNB: "false"
        INSTALL_VLLM: "false"
        INSTALL_DEEPSPEED: "false"
        PIP_INDEX: "https://pypi.org/simple"
    container_name: llamafactory
    volumes:
      # Hugging Face / ModelScope download caches persisted on the host.
      - ./hf_cache:/root/.cache/huggingface
      - ./ms_cache:/root/.cache/modelscope
      - ./data:/app/data
      - ./output:/app/output
    ports:
      # 7860: web UI; 8000: API server.
      - "7860:7860"
      - "8000:8000"
    # Share the host IPC namespace (needed for large shared-memory use
    # by multi-process data loaders).
    ipc: host
    # Keep an interactive shell available (`docker compose exec`/attach).
    tty: true
    stdin_open: true
    command: bash
    deploy:
      resources:
        reservations:
          devices:
            # Reserve every NVIDIA GPU on the host for this container.
            - driver: nvidia
              count: "all"
              capabilities: [gpu]
    restart: unless-stopped