From 2ead3f7d5469369195904d53b479b4d495179e23 Mon Sep 17 00:00:00 2001
From: Guancheng Fu <110874468+gc-fu@users.noreply.github.com>
Date: Mon, 23 Oct 2023 15:53:30 +0800
Subject: [PATCH] add manually build (#9244)

---
 .../workflows/manually_build_for_testing.yml | 29 +++++++++++++++++++
 1 file changed, 29 insertions(+)

diff --git a/.github/workflows/manually_build_for_testing.yml b/.github/workflows/manually_build_for_testing.yml
index 9c729d48..3e272284 100644
--- a/.github/workflows/manually_build_for_testing.yml
+++ b/.github/workflows/manually_build_for_testing.yml
@@ -141,6 +141,35 @@ jobs:
         sudo docker push 10.239.45.10/arda/${image}:${TAG}
         sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
 
+  bigdl-llm-serving-cpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-serving-cpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+
+    steps:
+    - uses: actions/checkout@v3
+      with:
+        ref: ${{ github.event.inputs.sha }}
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-serving-cpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-serving-cpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-serving-cpu
+        cd docker/llm/serving/cpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-ppml-gramine-base:
     if: ${{ github.event.inputs.artifact == 'bigdl-ppml-gramine-base' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]