[PPML] Upgrade PyTorch version to 1.13.0 (#6638)

* Make Gramine generate the patched OpenMP library

* cp libgomp to /usr/local/lib/

* Move gopenmp to a different place

* Add a patch to use the Gramine version of OpenMP

* Change base Dockerfile

* Apply the optimization recommended by the PyTorch tutorial

* Ready to test pert_ipex.py

* Remove extra space

* Fix Dockerfile to use PyTorch 1.13.0

* Pin markupsafe to 2.0.1 after installing bigdl-nano; this may need further changes (see the sketch after this list)

* Change test workflow
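
Taken together, the libgomp, PyTorch, and markupsafe bullets above amount to Dockerfile-level changes along the following lines. This is a minimal sketch, not the Dockerfile from this PR: the source path of the Gramine-patched libgomp is an assumption, and only the version pins named in the commit messages are shown.

    # Sketch only: the location of the Gramine-patched libgomp is an assumption.
    # Copy it to /usr/local/lib/ and refresh the linker cache so it is picked up.
    RUN cp /gramine/build/libgomp/libgomp.so* /usr/local/lib/ && ldconfig
    # Pin PyTorch to 1.13.0, then install bigdl-nano and re-pin markupsafe to 2.0.1
    # afterwards, as described in the commit messages above.
    RUN pip install torch==1.13.0 && \
        pip install bigdl-nano && \
        pip install markupsafe==2.0.1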
Guancheng Fu 2022-11-21 07:02:30 -05:00 committed by GitHub
parent bb3889958c
commit 1b72f1b38f


@@ -73,59 +73,7 @@ jobs:
sudo docker push ${base_image}:${TAG}
sudo docker tag ${base_image}:${TAG} 10.239.45.10/arda/${base_image}:${TAG}
sudo docker push 10.239.45.10/arda/${base_image}:${TAG}
echo "########################################################"
echo "######### bigdl-ppml-trusted-bigdata-gramine ###########"
echo "########################################################"
cd ../trusted-bigdata
export bigdata_base_image=intelanalytics/bigdl-ppml-trusted-bigdata-gramine-base
sudo docker build \
--build-arg http_proxy=${HTTP_PROXY} \
--build-arg https_proxy=${HTTPS_PROXY} \
--build-arg HTTP_PROXY_HOST=${HTTP_PROXY_HOST_2} \
--build-arg HTTP_PROXY_PORT=${HTTP_PROXY_PORT_2} \
--build-arg HTTPS_PROXY_HOST=${HTTP_PROXY_HOST_2} \
--build-arg HTTPS_PROXY_PORT=${HTTP_PROXY_PORT_3} \
--build-arg JDK_VERSION=8u192 \
--build-arg JDK_URL=${JDK_URL} \
--build-arg SPARK_JAR_REPO_URL=${SPARK_JAR_REPO_URL} \
--build-arg no_proxy=${LOCAL_IP} \
--build-arg BASE_IMAGE_NAME=${base_image} \
--build-arg BASE_IMAGE_TAG=${TAG} \
-t ${bigdata_base_image}:${TAG} -f ./Dockerfile .
sudo docker push ${bigdata_base_image}:${TAG}
sudo docker tag ${bigdata_base_image}:${TAG} 10.239.45.10/arda/${bigdata_base_image}:${TAG}
sudo docker push 10.239.45.10/arda/${bigdata_base_image}:${TAG}
cd custom-image
openssl genrsa -3 -out enclave-key.pem 3072
export bigdata_custom_image=intelanalytics/bigdl-ppml-trusted-bigdata-gramine-reference-32g
sudo docker build \
--build-arg BASE_IMAGE_NAME=${bigdata_base_image} \
--build-arg BASE_IMAGE_TAG=${TAG} \
--build-arg SGX_MEM_SIZE=32G \
--build-arg SGX_LOG_LEVEL=error \
-t ${bigdata_custom_image}:${TAG} \
-f ./Dockerfile .
sudo docker push ${bigdata_custom_image}:${TAG}
sudo docker tag ${bigdata_custom_image}:${TAG} 10.239.45.10/arda/${bigdata_custom_image}:${TAG}
sudo docker push 10.239.45.10/arda/${bigdata_custom_image}:${TAG}
sudo docker tag ${bigdata_custom_image}:${TAG} intelanalytics/bigdl-ppml-trusted-bigdata-gramine-reference:${TAG}
sudo docker push intelanalytics/bigdl-ppml-trusted-bigdata-gramine-reference:${TAG}
sudo docker rmi -f ${bigdata_custom_image}:${TAG}
export bigdata_custom_image=intelanalytics/bigdl-ppml-trusted-bigdata-gramine-reference-64g
sudo docker build \
--build-arg BASE_IMAGE_NAME=${bigdata_base_image} \
--build-arg BASE_IMAGE_TAG=${TAG} \
--build-arg SGX_MEM_SIZE=64G \
--build-arg SGX_LOG_LEVEL=error \
-t ${bigdata_custom_image}:${TAG} \
-f ./Dockerfile .
sudo docker push ${bigdata_custom_image}:${TAG}
sudo docker tag ${bigdata_custom_image}:${TAG} 10.239.45.10/arda/${bigdata_custom_image}:${TAG}
sudo docker push 10.239.45.10/arda/${bigdata_custom_image}:${TAG}
sudo docker rmi -f ${base_image}:${TAG}
sudo docker rmi -f ${bigdata_base_image}:${TAG}
sudo docker rmi -f ${bigdata_custom_image}:${TAG}
bigdl-ppml-trusted-deep-learning-gramine-base:
if: ${{ github.event.inputs.artifact == 'bigdl-ppml-trusted-deep-learning-gramine-base' || github.event.inputs.artifact == 'all' }}
runs-on: [self-hosted, Shire]
@@ -182,6 +130,7 @@ jobs:
--build-arg no_proxy=${NO_PROXY} \
--build-arg BASE_IMAGE_NAME=${base_image} \
--build-arg SGX_MEM_SIZE=64G \
--build-arg SGX_LOG_LEVEL=error \
--build-arg BASE_IMAGE_TAG=${TAG} \
-t ${image}:${TAG} -f ./Dockerfile .
sudo docker push ${image}:${TAG}