diff --git a/.github/workflows/manually_build.yml b/.github/workflows/manually_build.yml
index 3f3741bc..28a8bdfe 100644
--- a/.github/workflows/manually_build.yml
+++ b/.github/workflows/manually_build.yml
@@ -1,6 +1,10 @@
 name: Manually Build
 
 on:
+  pull_request:
+    branches: [ main ]
+    paths:
+      - '.github/workflows/manually_build.yml'
   workflow_dispatch:
     inputs:
       artifact:
@@ -10,6 +14,7 @@ on:
         type: choice
        options:
        - all
+        - bigdl-llm-xpu
        - bigdl-llm-cpu
        - bigdl-ppml-gramine-base
        - bigdl-ppml-trusted-bigdl-llm-gramine-base
@@ -48,13 +53,42 @@ on:
         type: string
 
 env:
-  TAG: ${{ github.event.inputs.tag }}
+  TAG: zhengjin-test
+  # TAG: ${{ github.event.inputs.tag }}
 
 permissions:
   contents: read
   packages: write
 
 jobs:
+  bigdl-llm-xpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-xpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-xpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-xpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-xpu
+        cd docker/llm/inference/xpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          --build-arg BASE_IMAGE_NAME=${base_image} \
+          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-llm-cpu:
     if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]
diff --git a/.github/workflows/manually_build_for_testing.yml b/.github/workflows/manually_build_for_testing.yml
index 1b723025..b0cbebc1 100644
--- a/.github/workflows/manually_build_for_testing.yml
+++ b/.github/workflows/manually_build_for_testing.yml
@@ -14,6 +14,7 @@ on:
         type: choice
        options:
        - all
+        - bigdl-llm-xpu
        - bigdl-llm-cpu
        - bigdl-ppml-gramine-base
        - bigdl-ppml-trusted-bigdl-llm-gramine-base
@@ -52,6 +53,36 @@ permissions:
   packages: write
 
 jobs:
+  bigdl-llm-xpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-xpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+      with:
+        ref: ${{ github.event.inputs.sha }}
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-xpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-xpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-xpu
+        cd docker/llm/inference/xpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          --build-arg BASE_IMAGE_NAME=${base_image} \
+          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-llm-cpu:
     if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]