Fix Trivy Docker Image Vulnerabilities for BigDL Release 2.5.0 (#10447)
* Update pypi version to fix trivy issues
* refine
parent f3fefdc9ce
commit 0e388f4b91
4 changed files with 11 additions and 17 deletions
@@ -50,25 +50,14 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get update && \
rm -rf ./BigDL && \
# Fix vllm service
pip install pydantic==1.10.11 && \
# Install miniconda
curl -LO "https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh" && \
bash Miniconda3-latest-Linux-x86_64.sh -b && \
rm -f Miniconda3-latest-Linux-x86_64.sh && \
# Install environment for speculative
/root/miniconda3/condabin/conda init bash && \
/bin/bash -c "source /root/.bashrc" && \
/root/miniconda3/condabin/conda create -n bigdl-speculative-py39 -y python=3.9
RUN cp /root/miniconda3/condabin/conda /usr/bin && \
conda init bash && \
. ~/.bashrc && \
conda activate bigdl-speculative-py39 && \
# Install bigdl-llm
cd /llm && \
pip install --pre --upgrade bigdl-llm[all] && \
# Fix CVE-2024-22195
pip install Jinja2==3.1.3 && \
pip install torch==2.2.0 torchvision==0.17.0 torchaudio==2.2.0 --index-url https://download.pytorch.org/whl/cpu && \
pip install intel-extension-for-pytorch==2.2.0 && \
pip install oneccl_bind_pt==2.2.0 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/ && \
pip install transformers==4.36.2 && \
pip install transformers_stream_generator && \
echo "conda deactivate" >> /root/.bashrc
pip install transformers==4.36.2

ENTRYPOINT ["/bin/bash"]
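Not part of this diff, but a quick way to sanity-check that the version pins above made it into the built image is to run pip through the image's /bin/bash entrypoint (the image tag below is a placeholder, not a name from the repo):

    docker run --rm <your-bigdl-cpu-image> -c "pip list | grep -iE 'transformers|jinja2|pydantic'"

Expect transformers 4.36.2 and Jinja2 3.1.3 in whichever environment the pins were applied to (the base interpreter or the bigdl-speculative-py39 conda env).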
@@ -35,7 +35,9 @@ RUN curl -fsSL https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-P
python3 get-pip.py && \
rm get-pip.py && \
pip install --upgrade requests argparse urllib3 && \
pip install --pre --upgrade bigdl-llm[xpu_2.1] -f https://developer.intel.com/ipex-whl-stable-xpu && \
pip install --pre --upgrade bigdl-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu && \
# Fix Trivy CVE Issues
pip install transformers==4.36.2 && \
pip install transformers_stream_generator einops tiktoken && \
# Install opencl-related repos
apt-get update && \
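As a follow-up check that is not part of the commit, the rebuilt XPU image can be rescanned with Trivy to confirm the flagged CVEs are gone (the image name is a placeholder):

    trivy image --severity HIGH,CRITICAL <your-bigdl-xpu-image>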
@@ -13,7 +13,9 @@ ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /sbi
# Install Serving Dependencies
RUN cd /llm && \
pip install --pre --upgrade bigdl-llm[serving] && \
# Fix Qwen model adpater in fastchat
# Fix Trivy CVE Issues
pip install Jinja2==3.1.3 transformers==4.36.2 gradio==4.19.2 cryptography==42.0.4 && \
# Fix Qwen model adpater in fastchat
patch /usr/local/lib/python3.9/dist-packages/fastchat/model/model_adapter.py < /llm/model_adapter.py.patch && \
chmod +x /opt/entrypoint.sh && \
chmod +x /sbin/tini && \
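For context on the patch step above: /llm/model_adapter.py.patch is an ordinary unified diff, which could be regenerated with diff -u roughly as sketched below (the .orig/.fixed file names are illustrative, not from the repo):

    # produce a unified diff between the stock and the fixed adapter
    diff -u model_adapter.py.orig model_adapter.py.fixed > model_adapter.py.patch
    # apply it to the installed copy, as the Dockerfile does
    patch /usr/local/lib/python3.9/dist-packages/fastchat/model/model_adapter.py < model_adapter.py.patch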
@@ -11,6 +11,7 @@ COPY ./entrypoint.sh /opt/entrypoint.sh
# Install Serving Dependencies
RUN cd /llm && \
pip install --pre --upgrade bigdl-llm[serving] && \
pip install transformers==4.36.2 gradio==4.19.2 && \
chmod +x /opt/entrypoint.sh
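A final hedged sanity check after pinning transformers and gradio like this (again not part of the commit): run pip's dependency-resolver check inside the built serving image, overriding its entrypoint; the image tag is a placeholder.

    docker run --rm --entrypoint pip <your-bigdl-serving-image> check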