From 37106a877cd83695602779193b66f297de2d88cb Mon Sep 17 00:00:00 2001
From: Yuwen Hu <54161268+Oscilloscope98@users.noreply.github.com>
Date: Wed, 21 Aug 2024 03:09:14 +0800
Subject: [PATCH] igpu performance test smal fix (#11872)

---
 .github/workflows/llm_performance_tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/llm_performance_tests.yml b/.github/workflows/llm_performance_tests.yml
index 736b1dd4..f740cf51 100644
--- a/.github/workflows/llm_performance_tests.yml
+++ b/.github/workflows/llm_performance_tests.yml
@@ -653,7 +653,6 @@ jobs:
           set BIGDL_LLM_XMX_DISABLED=1
           REM for llava
           set TRANSFORMERS_OFFLINE=1
-          pip install transformers==4.37.0
 
           cd python\llm\dev\benchmark\all-in-one
           move ..\..\..\test\benchmark\igpu-perf\32-32_int4_fp16.yaml config.yaml
@@ -895,6 +894,7 @@ jobs:
         shell: cmd
         run: |
           call conda activate igpu-perf
+          pip install transformers==4.37.0
           set SYCL_CACHE_PERSISTENT=1
           set BIGDL_LLM_XMX_DISABLED=1