Add bigdl llm cpu image build (#9047)
* modify Dockerfile
* add README.md
* add README.md
* Modify Dockerfile
* Add bigdl inference cpu image build
* Add bigdl llm cpu image build
* Add bigdl llm cpu image build
* Add bigdl llm cpu image build
parent a717352c59
commit 9ac950fa52

3 changed files with 67 additions and 3 deletions
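Both workflows below are manual workflow_dispatch pipelines, so the new image is only built when bigdl-llm-cpu (or all) is selected for the artifact input. A dispatch sketch using the GitHub CLI, assuming artifact is the only required input (any tag or extra inputs the workflows may define are not shown in this diff):

    # Sketch: trigger the manual build for the new image via GitHub CLI.
    # Assumes artifact is the only required workflow_dispatch input.
    gh workflow run manually_build.yml -f artifact=bigdl-llm-cpu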
				
			
		
							
								
								
									
.github/workflows/manually_build.yml (vendored): 29 additions
@@ -10,6 +10,7 @@ on:
         type: choice
         options:
         - all
+        - bigdl-llm-cpu
         - bigdl-ppml-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-ref
@@ -54,6 +55,34 @@ permissions:
   packages: write
 
 jobs:
+  bigdl-llm-cpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-cpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-cpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-cpu
+        cd docker/llm/inference/cpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          --build-arg BASE_IMAGE_NAME=${base_image} \
+          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-ppml-gramine-base:
     if: ${{ github.event.inputs.artifact == 'bigdl-ppml-gramine-base' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]
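The publish steps in this job push the image to Docker Hub, retag it for an internal registry mirror, push again, and delete both local tags. The same pattern as a standalone sketch (the real TAG value comes from workflow environment variables not shown in this diff):

    # Publish pattern used by the job (sketch): push, retag for the internal
    # mirror, push again, then remove both local tags to free runner disk.
    image=intelanalytics/bigdl-llm-cpu
    TAG=latest                    # assumption: the workflow supplies the real tag
    mirror=10.239.45.10/arda      # internal registry, as used in the diff
    sudo docker push ${image}:${TAG}
    sudo docker tag ${image}:${TAG} ${mirror}/${image}:${TAG}
    sudo docker push ${mirror}/${image}:${TAG}
    sudo docker rmi -f ${image}:${TAG} ${mirror}/${image}:${TAG}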
.github/workflows/manually_build_for_testing.yml (vendored): 31 additions
@@ -14,6 +14,7 @@ on:
         type: choice
         options:
         - all
+        - bigdl-llm-cpu
         - bigdl-ppml-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-ref
@@ -51,6 +52,36 @@ permissions:
   packages: write
 
 jobs:
+  bigdl-llm-cpu:
+    if: ${{ github.event.inputs.artifact == 'bigdl-llm-cpu' || github.event.inputs.artifact == 'all' }}
+    runs-on: [self-hosted, Shire]
+    steps:
+    - uses: actions/checkout@v3
+      with:
+        ref: ${{ github.event.inputs.sha }}
+    - name: docker login
+      run: |
+        docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+    - name: bigdl-llm-cpu
+      run: |
+        echo "##############################################################"
+        echo "####### bigdl-llm-cpu ########"
+        echo "##############################################################"
+        export image=intelanalytics/bigdl-llm-cpu
+        cd docker/llm/inference/cpu/docker
+        sudo docker build \
+          --no-cache=true \
+          --build-arg http_proxy=${HTTP_PROXY} \
+          --build-arg https_proxy=${HTTPS_PROXY} \
+          --build-arg no_proxy=${NO_PROXY} \
+          --build-arg BASE_IMAGE_NAME=${base_image} \
+          --build-arg BASE_IMAGE_TAG=${TAG} \
+          -t ${image}:${TAG} -f ./Dockerfile .
+        sudo docker push ${image}:${TAG}
+        sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+        sudo docker push 10.239.45.10/arda/${image}:${TAG}
+        sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+
   bigdl-ppml-gramine-base:
     if: ${{ github.event.inputs.artifact == 'bigdl-ppml-gramine-base' || github.event.inputs.artifact == 'all' }}
     runs-on: [self-hosted, Shire]
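This variant differs from manually_build.yml only in checking out an arbitrary commit through the sha input, so a test image can be built from any revision. A dispatch sketch (the sha value is a placeholder):

    # Sketch: build a test image from a specific commit via the sha input.
    gh workflow run manually_build_for_testing.yml \
      -f artifact=bigdl-llm-cpu \
      -f sha=abc1234               # placeholder: the commit to build from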
				
			
			
docker/llm/inference/cpu/docker/Dockerfile (path as used by the build jobs above): 7 additions and 3 deletions
@@ -18,7 +18,11 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get update && \
     python3 get-pip.py && \
     rm get-pip.py && \
     pip install --upgrade requests argparse urllib3 && \
+    pip3 install --no-cache-dir --upgrade torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu && \
     pip install --pre --upgrade bigdl-llm[all] && \
-    pip install --pre --upgrade bigdl-nano
+    pip install --pre --upgrade bigdl-nano && \
+# Download chat.py script
+    wget -P /root https://raw.githubusercontent.com/intel-analytics/BigDL/main/python/llm/portable-executable/chat.py && \
+    export PYTHONUNBUFFERED=1
 
 ENTRYPOINT ["/bin/bash"]
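Once the image is built, the chat.py script downloaded into /root gives a quick way to exercise a model inside the container. A usage sketch; the tag, model path, and chat.py flags below are assumptions, not anything defined in this commit:

    # Sketch: start the container, then launch the bundled chat script.
    sudo docker run -it intelanalytics/bigdl-llm-cpu:latest    # tag is an assumption
    # inside the container:
    python3 /root/chat.py --model-path /path/to/model          # flag is an assumption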