diff --git a/.github/workflows/manually_build.yml b/.github/workflows/manually_build.yml
index 3f3741bc..24603c73 100644
--- a/.github/workflows/manually_build.yml
+++ b/.github/workflows/manually_build.yml
@@ -10,6 +10,7 @@ on:
         type: choice
         options:
         - all
+        - bigdl-llm-xpu
         - bigdl-llm-cpu
         - bigdl-ppml-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
@@ -55,6 +56,34 @@ permissions:
   packages: write
 
 jobs:
+  bigdl-llm-xpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-xpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-xpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-xpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-xpu
+        cd docker/llm/inference/xpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          --build-arg BASE_IMAGE_NAME=${base_image} \
+          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-llm-cpu:
     if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]
diff --git a/.github/workflows/manually_build_for_testing.yml b/.github/workflows/manually_build_for_testing.yml
index 1b723025..b0cbebc1 100644
--- a/.github/workflows/manually_build_for_testing.yml
+++ b/.github/workflows/manually_build_for_testing.yml
@@ -14,6 +14,7 @@ on:
         type: choice
         options:
         - all
+        - bigdl-llm-xpu
         - bigdl-llm-cpu
         - bigdl-ppml-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
@@ -52,6 +53,36 @@ permissions:
   packages: write
 
 jobs:
+  bigdl-llm-xpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-xpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+      with:
+        ref: ${{ github.event.inputs.sha }}
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-xpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-xpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-xpu
+        cd docker/llm/inference/xpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          --build-arg BASE_IMAGE_NAME=${base_image} \
+          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-llm-cpu:
     if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]
diff --git a/docker/llm/inference/xpu/docker/Dockerfile b/docker/llm/inference/xpu/docker/Dockerfile
index 0366c933..0b7da551 100644
--- a/docker/llm/inference/xpu/docker/Dockerfile
+++ b/docker/llm/inference/xpu/docker/Dockerfile
@@ -1,7 +1,7 @@
 FROM intel/oneapi-basekit:2023.2.1-devel-ubuntu22.04
 
-ENV http_proxy $HTTP_PROXY
-ENV https_proxy $HTTP_PROXY
+ARG http_proxy
+ARG https_proxy
 
 ENV TZ=Asia/Shanghai
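With the Dockerfile switched from ENV to ARG, the proxies are supplied only at build time via --build-arg rather than being baked into the image. A minimal local sketch of the build these workflow steps perform; the proxy URL and the `local` tag below are illustrative placeholders (the workflow reads HTTP_PROXY/HTTPS_PROXY and TAG from its runner environment):

    # Sketch only: proxy URL and image tag are placeholders, not values from the workflow.
    cd docker/llm/inference/xpu/docker
    docker build \
      --build-arg http_proxy=http://proxy.example.com:8080 \
      --build-arg https_proxy=http://proxy.example.com:8080 \
      -t intelanalytics/bigdl-llm-xpu:local -f ./Dockerfile .

The workflow additionally passes no_proxy, BASE_IMAGE_NAME, and BASE_IMAGE_TAG build args and pushes the result to Docker Hub and the internal 10.239.45.10/arda registry; those steps are omitted from the local sketch.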