Occlum fastchat build Use nocache and update order (#8972)
parent dd57623650
commit 7563b26ca9
2 changed files with 4 additions and 3 deletions
.github/workflows/manually_build.yml (vendored)
@@ -11,10 +11,10 @@ on:
         options:
         - all
         - bigdl-ppml-gramine-base
-        - bigdl-ppml-trusted-deep-learning-gramine-base
-        - bigdl-ppml-trusted-deep-learning-gramine-ref
         - bigdl-ppml-trusted-bigdl-llm-gramine-base
         - bigdl-ppml-trusted-bigdl-llm-gramine-ref
+        - bigdl-ppml-trusted-deep-learning-gramine-base
+        - bigdl-ppml-trusted-deep-learning-gramine-ref
         - bigdl-ppml-trusted-dl-serving-gramine-base
         - bigdl-ppml-trusted-dl-serving-gramine-ref
         - bigdl-ppml-trusted-big-data-ml-python-gramine
@@ -24,10 +24,10 @@ on:
         - bigdl-ppml-trusted-big-data-ml-python-gramine
         - bigdl-ppml-trusted-big-data-ml-python-gramine-noattest
         - bigdl-ppml-trusted-big-data-ml-python-graphene
-        - bigdl-ppml-trusted-llm-fastchat-occlum
         - bigdl-ppml-trusted-big-data-ml-scala-occlum
         - bigdl-ppml-trusted-big-data-ml-scala-occlum-production
         - bigdl-ppml-trusted-big-data-ml-scala-occlum-production-customer
+        - bigdl-ppml-trusted-llm-fastchat-occlum
         - bigdl-ppml-trusted-realtime-ml-scala-graphene
         - bigdl-ppml-trusted-realtime-ml-scala-occlum
         - bigdl-ppml-trusted-bigdl-llm-tdx
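Both hunks above reorder entries in the options list of a workflow_dispatch choice input; the new positions put the image names back into alphabetical order, which appears to be the "update order" part of the commit message. A minimal sketch of how such an input is declared in a GitHub Actions workflow follows; the input name "image", its description, and its default are assumptions for illustration, and only the option names come from the diff:

    # Sketch of a workflow_dispatch choice input (not copied from the file).
    # The input name "image", description, and default are assumptions;
    # the option names are taken from the hunks above (list truncated).
    on:
      workflow_dispatch:
        inputs:
          image:
            description: 'which docker image to build'
            required: true
            default: 'all'
            type: choice
            options:
            - all
            - bigdl-ppml-gramine-base
            - bigdl-ppml-trusted-bigdl-llm-gramine-base
            - bigdl-ppml-trusted-bigdl-llm-gramine-ref
            - bigdl-ppml-trusted-deep-learning-gramine-base
            - bigdl-ppml-trusted-deep-learning-gramine-ref
            - bigdl-ppml-trusted-llm-fastchat-occlum
            # ... remaining images in the order shown above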
@@ -542,6 +542,7 @@ jobs:
       export image=intelanalytics/bigdl-ppml-trusted-llm-fastchat-occlum
       pwd
       docker build \
+       --no-cache=true \
        --build-arg http_proxy=${HTTP_PROXY} \
        --build-arg https_proxy=${HTTPS_PROXY} \
        --build-arg HTTP_PROXY_HOST=${HTTP_PROXY_HOST_2} \
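The functional part of the change is the --no-cache=true flag, which makes Docker rebuild every layer instead of reusing its layer cache, so the fastchat image always picks up freshly installed dependencies. A minimal equivalent invocation is sketched below; the image tag and build context are assumptions, and only the flags visible in the hunk come from the workflow:

    # Hedged example: rebuild from scratch, ignoring cached layers.
    # The ":test" tag and "." build context are assumptions for illustration;
    # the proxy variables are expected to be set in the environment.
    export image=intelanalytics/bigdl-ppml-trusted-llm-fastchat-occlum
    docker build \
      --no-cache=true \
      --build-arg http_proxy=${HTTP_PROXY} \
      --build-arg https_proxy=${HTTPS_PROXY} \
      -t ${image}:test .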