Occlum fastchat build: use no-cache and update order (#8972)

Wang Jian 2023-09-14 14:05:15 +08:00 committed by GitHub
parent dd57623650
commit 7563b26ca9
2 changed files with 4 additions and 3 deletions


@@ -11,10 +11,10 @@ on:
         options:
         - all
         - bigdl-ppml-gramine-base
-        - bigdl-ppml-trusted-deep-learning-gramine-base
-        - bigdl-ppml-trusted-deep-learning-gramine-ref
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-ref
+        - bigdl-ppml-trusted-deep-learning-gramine-base
+        - bigdl-ppml-trusted-deep-learning-gramine-ref
         - bigdl-ppml-trusted-dl-serving-gramine-base
         - bigdl-ppml-trusted-dl-serving-gramine-ref
         - bigdl-ppml-trusted-big-data-ml-python-gramine
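For context, these entries are the choices of a workflow_dispatch input; the reorder places the deep-learning images after the bigdl-llm ones. A minimal sketch of the surrounding structure is below -- the input name, description, and default are assumptions, only the option list itself comes from this diff:

on:
  workflow_dispatch:
    inputs:
      artifact:                               # hypothetical input name
        description: 'which image to build'   # assumed wording
        required: true
        default: 'all'
        type: choice
        options:
        - all
        - bigdl-ppml-gramine-base
        - bigdl-ppml-trusted-bigdl-llm-gramine-base
        - bigdl-ppml-trusted-bigdl-llm-gramine-ref
        - bigdl-ppml-trusted-deep-learning-gramine-base
        - bigdl-ppml-trusted-deep-learning-gramine-ref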


@@ -24,10 +24,10 @@ on:
         - bigdl-ppml-trusted-big-data-ml-python-gramine
         - bigdl-ppml-trusted-big-data-ml-python-gramine-noattest
         - bigdl-ppml-trusted-big-data-ml-python-graphene
-        - bigdl-ppml-trusted-llm-fastchat-occlum
         - bigdl-ppml-trusted-big-data-ml-scala-occlum
         - bigdl-ppml-trusted-big-data-ml-scala-occlum-production
         - bigdl-ppml-trusted-big-data-ml-scala-occlum-production-customer
+        - bigdl-ppml-trusted-llm-fastchat-occlum
         - bigdl-ppml-trusted-realtime-ml-scala-graphene
         - bigdl-ppml-trusted-realtime-ml-scala-occlum
         - bigdl-ppml-trusted-bigdl-llm-tdx
@@ -542,6 +542,7 @@ jobs:
         export image=intelanalytics/bigdl-ppml-trusted-llm-fastchat-occlum
         pwd
         docker build \
+          --no-cache=true \
           --build-arg http_proxy=${HTTP_PROXY} \
           --build-arg https_proxy=${HTTPS_PROXY} \
           --build-arg HTTP_PROXY_HOST=${HTTP_PROXY_HOST_2} \
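The added --no-cache=true flag tells Docker to rebuild every layer instead of reusing the local layer cache, so the fastchat Occlum image always picks up fresh package and source updates. A minimal sketch of the resulting build step follows; the step name, image tag, and Dockerfile path are assumptions, while the image name, proxy build args, and the flag itself come from the diff:

- name: build bigdl-ppml-trusted-llm-fastchat-occlum   # hypothetical step name
  run: |
    export image=intelanalytics/bigdl-ppml-trusted-llm-fastchat-occlum
    # --no-cache=true forces a full rebuild of all layers rather than
    # reusing cached ones; proxy values are passed through as build args.
    docker build \
      --no-cache=true \
      --build-arg http_proxy=${HTTP_PROXY} \
      --build-arg https_proxy=${HTTPS_PROXY} \
      -t ${image}:latest \
      -f ./Dockerfile .
    # The tag (:latest) and Dockerfile location above are placeholders.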