ipex-llm/docker/llm/serving/xpu/docker/Dockerfile

FROM intelanalytics/bigdl-llm-xpu:2.5.0-SNAPSHOT
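# Optional HTTP(S) proxy settings, passed in at build time if needed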
ARG http_proxy
ARG https_proxy
# Disable pip's cache behavior
ARG PIP_NO_CACHE_DIR=false
COPY ./entrypoint.sh /opt/entrypoint.sh
# Install Serving Dependencies
RUN cd /llm && \
    pip install --pre --upgrade bigdl-llm[serving] && \
    chmod +x /opt/entrypoint.sh
WORKDIR /llm/
ENTRYPOINT [ "/opt/entrypoint.sh" ]
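
For reference, a minimal sketch of building and running this image. The image tag, the proxy values, and the --device=/dev/dri GPU mapping are illustrative assumptions, not taken from this repository:

# Build, forwarding the optional proxy ARGs declared in the Dockerfile
# (the tag "bigdl-llm-serving-xpu:local" is a placeholder)
docker build \
    --build-arg http_proxy=$http_proxy \
    --build-arg https_proxy=$https_proxy \
    -t bigdl-llm-serving-xpu:local .

# Run interactively; /dev/dri is an assumed device mapping for Intel GPU access
docker run -it --device=/dev/dri bigdl-llm-serving-xpu:local

The container starts via /opt/entrypoint.sh, so any serving options would be supplied as arguments to docker run rather than by overriding the entrypoint.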