ipex-llm/docker/llm/inference/cpu/docker/Dockerfile

FROM ubuntu:22.04
ARG http_proxy
ARG https_proxy
ARG PIP_NO_CACHE_DIR=false
ARG DEBIAN_FRONTEND=noninteractive
ENV PYTHONUNBUFFERED=1
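# Copy the notebook launch script into the image (made executable later in the build)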
COPY ./start-notebook.sh /llm/start-notebook.sh
# Update the software sources
RUN env DEBIAN_FRONTEND=noninteractive apt-get update && \
# Install essential packages
apt-get install -y --no-install-recommends libunwind8-dev vim less && \
# Install git, curl, and wget
apt-get install -y --no-install-recommends git curl wget && \
# Install Python 3.11
apt-get install -y --no-install-recommends python3.11 && \
# Install Python 3.11 development and utility packages
apt-get install -y --no-install-recommends python3-pip python3.11-dev python3-wheel python3.11-distutils && \
# Remove the original /usr/bin/python3 symbolic link
rm /usr/bin/python3 && \
# Create a symbolic link pointing to Python 3.11 at /usr/bin/python3
ln -s /usr/bin/python3.11 /usr/bin/python3 && \
# Create a symbolic link pointing to /usr/bin/python3 at /usr/bin/python
ln -s /usr/bin/python3 /usr/bin/python && \
# Download and install pip; installing FastChat from source requires PEP 660 support
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
python3 get-pip.py && \
rm get-pip.py && \
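# Upgrade requests and urllib3, which are frequently flagged by security scans;
# note that argparse already ships with the standard library, so upgrading it here is effectively a no-op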
pip install --upgrade requests argparse urllib3 && \
# Install JupyterLab and download ipex-llm-tutorial
pip install --upgrade jupyterlab && \
git clone https://github.com/intel-analytics/ipex-llm-tutorial /llm/ipex-llm-tutorial && \
chmod +x /llm/start-notebook.sh && \
# Download all-in-one benchmark
git clone https://github.com/intel-analytics/IPEX-LLM && \
cp -r ./IPEX-LLM/python/llm/dev/benchmark/ /llm/benchmark && \
# Copy the portable-zip scripts (including chat.py) and install their colorama dependency
pip install --upgrade colorama && \
cp -r ./IPEX-LLM/python/llm/portable-zip/ /llm/portable-zip && \
# Install all-in-one dependencies
apt-get install -y --no-install-recommends numactl && \
pip install --upgrade omegaconf && \
pip install --upgrade pandas && \
# Install vLLM serving dependencies
pip install --upgrade fastapi && \
pip install --upgrade "uvicorn[standard]" && \
# Add Qwen support
pip install --upgrade transformers_stream_generator einops && \
# Copy vLLM-Serving
cp -r ./IPEX-LLM/python/llm/example/CPU/vLLM-Serving/ /llm/vLLM-Serving && \
rm -rf ./IPEX-LLM && \
# Pin pydantic to 1.x to keep the vLLM service working
pip install pydantic==1.10.11 && \
# Install ipex-llm
pip install --pre --upgrade ipex-llm[all] && \
# Fix CVE-2024-22195
pip install Jinja2==3.1.3 && \
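# Pin the CPU-only PyTorch stack; intel-extension-for-pytorch and oneccl_bind_pt
# are versioned in lockstep with torch, so all three must stay at 2.2.0 together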
pip install torch==2.2.0 torchvision==0.17.0 torchaudio==2.2.0 --index-url https://download.pytorch.org/whl/cpu && \
pip install intel-extension-for-pytorch==2.2.0 && \
pip install oneccl_bind_pt==2.2.0 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/ && \
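# Pin transformers, presumably to a version validated against this ipex-llm release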
pip install transformers==4.36.2
ENTRYPOINT ["/bin/bash"]