# syntax=docker/dockerfile:1

# Use the NVIDIA official image with PyTorch 2.3.0
# https://docs.nvidia.com/deeplearning/frameworks/pytorch-release-notes/rel-24-02.html
FROM nvcr.io/nvidia/pytorch:24.02-py3

# Define installation arguments (build-time only; not present in the runtime env)
ARG INSTALL_BNB=false
ARG INSTALL_VLLM=false
ARG INSTALL_DEEPSPEED=false
ARG PIP_INDEX=https://pypi.org/simple

# Set the working directory
WORKDIR /app

# Install the requirements.
# requirements.txt is copied on its own so this layer stays cached until the
# dependency list itself changes; --no-cache-dir keeps pip's wheel cache out
# of the image layers.
COPY requirements.txt /app/
RUN pip config set global.index-url "$PIP_INDEX" && \
    python -m pip install --no-cache-dir --upgrade pip && \
    python -m pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application into the image
COPY . /app/

# Install the LLaMA Factory with the extras selected via the build args above.
# transformer-engine and flash-attn shipped with the NVIDIA base image are
# removed afterwards because they conflict with the versions installed here.
RUN EXTRA_PACKAGES="metrics"; \
    if [ "$INSTALL_BNB" = "true" ]; then \
        EXTRA_PACKAGES="${EXTRA_PACKAGES},bitsandbytes"; \
    fi; \
    if [ "$INSTALL_VLLM" = "true" ]; then \
        EXTRA_PACKAGES="${EXTRA_PACKAGES},vllm"; \
    fi; \
    if [ "$INSTALL_DEEPSPEED" = "true" ]; then \
        EXTRA_PACKAGES="${EXTRA_PACKAGES},deepspeed"; \
    fi; \
    pip install --no-cache-dir -e ".[$EXTRA_PACKAGES]" && \
    pip uninstall -y transformer-engine flash-attn

# Set up volumes for the HF cache, datasets, and training output
VOLUME [ "/root/.cache/huggingface/", "/app/data", "/app/output" ]

# Expose port 7860 for the LLaMA Board
EXPOSE 7860

# Expose port 8000 for the API service
EXPOSE 8000