Merge pull request #9117 from Zhengjin-Wang/manually_build

add llm-serving-xpu on github action
Authored by Lilac09 on 2023-10-10 10:09:06 +08:00, committed by GitHub
commit 6264381f2e
2 changed files with 58 additions and 8 deletions
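The change adds bigdl-llm-serving-xpu as a selectable artifact in the manual-build workflows, so the serving image can be built and pushed on demand. As a rough sketch of how such a run could be dispatched with the GitHub CLI (the workflow file name is a placeholder; only the artifact input name is confirmed by this diff, and how TAG reaches the runner is not shown here):

  # Hypothetical manual dispatch of the new artifact choice.
  # <workflow-file>.yml stands in for the actual workflow file name.
  gh workflow run <workflow-file>.yml -f artifact=bigdl-llm-serving-xpu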

File 1 of 2:

@@ -12,6 +12,7 @@ on:
         - all
         - bigdl-llm-xpu
         - bigdl-llm-cpu
+        - bigdl-llm-serving-xpu
         - bigdl-llm-serving-cpu
         - bigdl-ppml-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
@@ -78,8 +79,6 @@ jobs:
           --build-arg http_proxy=${HTTP_PROXY} \
           --build-arg https_proxy=${HTTPS_PROXY} \
           --build-arg no_proxy=${NO_PROXY} \
-          --build-arg BASE_IMAGE_NAME=${base_image} \
-          --build-arg BASE_IMAGE_TAG=${TAG} \
           -t ${image}:${TAG} -f ./Dockerfile .
         sudo docker push ${image}:${TAG}
         sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
@@ -107,8 +106,33 @@ jobs:
           --build-arg http_proxy=${HTTP_PROXY} \
           --build-arg https_proxy=${HTTPS_PROXY} \
           --build-arg no_proxy=${NO_PROXY} \
-          --build-arg BASE_IMAGE_NAME=${base_image} \
-          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
+  bigdl-llm-serving-xpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-serving-xpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-serving-xpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-serving-xpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-serving-xpu
+        cd docker/llm/serving/xpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
           -t ${image}:${TAG} -f ./Dockerfile .
         sudo docker push ${image}:${TAG}
         sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
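For context on what the new job produces: the pushed image is a serving image intended for Intel XPU hosts, built from docker/llm/serving/xpu/docker. A minimal local smoke-test sketch, assuming the common Intel GPU convention of passing the render device into the container (the --device mapping and the pull step are assumptions, not part of this workflow; the container entrypoint comes from the image's Dockerfile):

  # Sketch: pull the freshly pushed image and start it with the host GPU exposed.
  # Replace <tag> with the TAG used by the workflow run.
  sudo docker pull intelanalytics/bigdl-llm-serving-xpu:<tag>
  sudo docker run -it --rm --device=/dev/dri intelanalytics/bigdl-llm-serving-xpu:<tag>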

File 2 of 2:

@@ -16,6 +16,7 @@ on:
         - all
         - bigdl-llm-xpu
         - bigdl-llm-cpu
+        - bigdl-llm-serving-xpu
         - bigdl-ppml-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-ref
@@ -76,8 +77,6 @@ jobs:
           --build-arg http_proxy=${HTTP_PROXY} \
           --build-arg https_proxy=${HTTPS_PROXY} \
           --build-arg no_proxy=${NO_PROXY} \
-          --build-arg BASE_IMAGE_NAME=${base_image} \
-          --build-arg BASE_IMAGE_TAG=${TAG} \
           -t ${image}:${TAG} -f ./Dockerfile .
         sudo docker push ${image}:${TAG}
         sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
@@ -107,8 +106,35 @@ jobs:
           --build-arg http_proxy=${HTTP_PROXY} \
           --build-arg https_proxy=${HTTPS_PROXY} \
           --build-arg no_proxy=${NO_PROXY} \
-          --build-arg BASE_IMAGE_NAME=${base_image} \
-          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
+  bigdl-llm-serving-xpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-serving-xpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+      with:
+        ref: ${{ github.event.inputs.sha }}
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-serving-xpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-serving-xpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-serving-xpu
+        cd docker/llm/serving/xpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
           -t ${image}:${TAG} -f ./Dockerfile .
         sudo docker push ${image}:${TAG}
         sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
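The second workflow's version of the job additionally checks out a caller-supplied commit via ref: ${{ github.event.inputs.sha }}, so a dispatch would pass both inputs. A hedged sketch (the workflow file name is a placeholder; the artifact and sha input names come from the expressions above):

  # Hypothetical dispatch that builds bigdl-llm-serving-xpu from an exact commit.
  gh workflow run <second-workflow-file>.yml \
    -f artifact=bigdl-llm-serving-xpu \
    -f sha=<full-commit-sha>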