Add bigdl inference cpu image build

This commit is contained in:
Wang 2023-09-26 14:03:55 +08:00
parent 7a69bee8d0
commit 390c90551e
2 changed files with 66 additions and 1 deletions

View file

@ -1,6 +1,10 @@
name: Manually Build name: Manually Build
on: on:
# NOTE(review): temporary test trigger — on pull_request events
# github.event.inputs.* is empty, so every job's 'if' guard evaluates
# false and nothing builds; remove this trigger before merging.
pull_request:
branches: [ main ]
paths:
- '.github/workflows/manually_build.yml'
workflow_dispatch: workflow_dispatch:
inputs: inputs:
artifact: artifact:
@ -10,6 +14,7 @@ on:
type: choice type: choice
options: options:
- all - all
- bigdl-llm-xpu
- bigdl-llm-cpu - bigdl-llm-cpu
- bigdl-ppml-gramine-base - bigdl-ppml-gramine-base
- bigdl-ppml-trusted-bigdl-llm-gramine-base - bigdl-ppml-trusted-bigdl-llm-gramine-base
@ -48,13 +53,42 @@ on:
type: string type: string
env: env:
TAG: ${{ github.event.inputs.tag }} TAG: ${{ github.event.inputs.tag }}
permissions: permissions:
contents: read contents: read
packages: write packages: write
jobs: jobs:
# Job: build and publish the bigdl-llm-xpu inference image to Docker Hub and
# the internal 10.239.45.10/arda mirror (parallels the bigdl-llm-cpu job).
bigdl-llm-xpu:
  if: ${{ github.event.inputs.artifact == 'bigdl-llm-xpu' || github.event.inputs.artifact == 'all' }}
  runs-on: [self-hosted, Shire]
  steps:
    - uses: actions/checkout@v3
      with:
        # Consistency fix: the twin bigdl-llm-xpu job in the other workflow of
        # this commit checks out the requested commit; do the same here so a
        # dispatched build targets the chosen SHA, not the branch head.
        # NOTE(review): assumes this workflow declares a 'sha' input — if it
        # does not, an empty ref falls back to checkout's default; confirm
        # against the workflow's inputs block.
        ref: ${{ github.event.inputs.sha }}
    - name: docker login
      run: |
        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
    - name: bigdl-llm-xpu
      run: |
        echo "##############################################################"
        echo "####### bigdl-llm-xpu ########"
        echo "##############################################################"
        export image=intelanalytics/bigdl-llm-xpu
        cd docker/llm/inference/xpu/docker
        # NOTE(review): ${base_image} is not set in this step or in any env
        # visible here — presumably exported by the self-hosted runner; if not,
        # BASE_IMAGE_NAME expands empty. Confirm on the Shire runners.
        sudo docker build \
          --no-cache=true \
          --build-arg http_proxy=${HTTP_PROXY} \
          --build-arg https_proxy=${HTTPS_PROXY} \
          --build-arg no_proxy=${NO_PROXY} \
          --build-arg BASE_IMAGE_NAME=${base_image} \
          --build-arg BASE_IMAGE_TAG=${TAG} \
          -t ${image}:${TAG} -f ./Dockerfile .
        sudo docker push ${image}:${TAG}
        # Mirror the image to the internal registry, then drop local tags.
        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
        sudo docker push 10.239.45.10/arda/${image}:${TAG}
        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
bigdl-llm-cpu: bigdl-llm-cpu:
if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }} if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
runs-on: [self-hosted, Shire] runs-on: [self-hosted, Shire]

View file

@ -14,6 +14,7 @@ on:
type: choice type: choice
options: options:
- all - all
- bigdl-llm-xpu
- bigdl-llm-cpu - bigdl-llm-cpu
- bigdl-ppml-gramine-base - bigdl-ppml-gramine-base
- bigdl-ppml-trusted-bigdl-llm-gramine-base - bigdl-ppml-trusted-bigdl-llm-gramine-base
@ -52,6 +53,36 @@ permissions:
packages: write packages: write
jobs: jobs:
# Builds the intelanalytics/bigdl-llm-xpu inference image for the requested
# commit and publishes it to Docker Hub and the 10.239.45.10/arda mirror.
# Runs when the dispatch input 'artifact' is 'bigdl-llm-xpu' or 'all'.
bigdl-llm-xpu:
  if: ${{ github.event.inputs.artifact == 'bigdl-llm-xpu' || github.event.inputs.artifact == 'all' }}
  runs-on:
    - self-hosted
    - Shire
  steps:
    # Check out the exact commit requested via the 'sha' input.
    - uses: actions/checkout@v3
      with:
        ref: ${{ github.event.inputs.sha }}
    - name: docker login
      run: |
        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
    # Build, push to both registries, then remove the local tags.
    - name: bigdl-llm-xpu
      run: |
        echo "##############################################################"
        echo "####### bigdl-llm-xpu ########"
        echo "##############################################################"
        export image=intelanalytics/bigdl-llm-xpu
        cd docker/llm/inference/xpu/docker
        sudo docker build \
          --no-cache=true \
          --build-arg http_proxy=${HTTP_PROXY} \
          --build-arg https_proxy=${HTTPS_PROXY} \
          --build-arg no_proxy=${NO_PROXY} \
          --build-arg BASE_IMAGE_NAME=${base_image} \
          --build-arg BASE_IMAGE_TAG=${TAG} \
          -t ${image}:${TAG} -f ./Dockerfile .
        sudo docker push ${image}:${TAG}
        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
        sudo docker push 10.239.45.10/arda/${image}:${TAG}
        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
bigdl-llm-cpu: bigdl-llm-cpu:
if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }} if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
runs-on: [self-hosted, Shire] runs-on: [self-hosted, Shire]