diff --git a/.github/workflows/llm_performance_tests.yml b/.github/workflows/llm_performance_tests.yml index 59849ba4..bb9a2eaa 100644 --- a/.github/workflows/llm_performance_tests.yml +++ b/.github/workflows/llm_performance_tests.yml @@ -62,7 +62,7 @@ jobs: python -m pip install --upgrade transformers_stream_generator python -m pip install --upgrade tiktoken - # please uncomment it and comment the "Install BigDL-LLM from Pypi" part for PR tests + # please uncomment it and comment the "Install IPEX-LLM from Pypi" part for PR tests # - name: Download llm binary # uses: ./.github/actions/llm/download-llm-binary @@ -71,13 +71,13 @@ jobs: # with: # extra-dependency: "xpu_2.1" - - name: Install BigDL-LLM from Pypi + - name: Install IPEX-LLM from Pypi shell: bash run: | - pip install --pre --upgrade bigdl-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu + pip install --pre --upgrade ipex-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu test_version_date=`date -d 'yesterday' '+%Y%m%d'` - if ! pip show bigdl-llm | grep $test_version_date; then - echo "Did not install bigdl-llm with excepted version $test_version_date" + if ! 
pip show ipex-llm | grep $test_version_date; then + echo "Did not install ipex-llm with expected version $test_version_date" exit 1 fi @@ -188,20 +188,20 @@ jobs: python -m pip install --upgrade tiktoken python -m pip install --upgrade transformers_stream_generator - # please uncomment it and comment the "Install BigDL-LLM from Pypi" part for PR tests + # please uncomment it and comment the "Install IPEX-LLM from Pypi" part for PR tests # - name: Download llm binary # uses: ./.github/actions/llm/download-llm-binary # - name: Run LLM install (all) test # uses: ./.github/actions/llm/setup-llm-env - - name: Install BigDL-LLM from Pypi + - name: Install IPEX-LLM from Pypi shell: bash run: | - pip install --pre --upgrade bigdl-llm[all] -f https://developer.intel.com/ipex-whl-stable-xpu + pip install --pre --upgrade ipex-llm[all] -f https://developer.intel.com/ipex-whl-stable-xpu test_version_date=`date -d 'yesterday' '+%Y%m%d'` - if ! pip show bigdl-llm | grep $test_version_date; then - echo "Did not install bigdl-llm with excepted version $test_version_date" + if ! 
pip show ipex-llm | grep $test_version_date; then + echo "Did not install ipex-llm with expected version $test_version_date" exit 1 fi @@ -215,7 +215,7 @@ jobs: cd python/llm/dev/benchmark/all-in-one export http_proxy=${HTTP_PROXY} export https_proxy=${HTTPS_PROXY} - source bigdl-llm-init -t + source ipex-llm-init -t export OMP_NUM_THREADS=48 # hide time info sed -i 's/str(end - st)/"xxxxxx"/g' run.py @@ -225,9 +225,9 @@ jobs: python -m pip install pandas==1.5.3 python csv_to_html.py -f /models/nightly_perf_cpu cd /models/nightly_perf_cpu - for f in *.html; do - curl -T "$f" ${LLM_FTP_URL}/llm/nightly_perf/nightly_perf_cpu/ - done + # for f in *.html; do + # curl -T "$f" ${LLM_FTP_URL}/llm/nightly_perf/nightly_perf_cpu/ + # done llm-performance-test-on-core: if: ${{ github.event.schedule || github.event.inputs.artifact == 'llm-performance-test-on-core' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests @@ -262,20 +262,20 @@ jobs: python -m pip install --upgrade omegaconf pandas python -m pip install --upgrade tiktoken einops transformers_stream_generator - # please uncomment it and comment the "Install BigDL-LLM from Pypi" part for PR tests + # please uncomment it and comment the "Install IPEX-LLM from Pypi" part for PR tests # - name: Download llm binary # uses: ./.github/actions/llm/download-llm-binary # - name: Run LLM install (all) test # uses: ./.github/actions/llm/setup-llm-env - - name: Install BigDL-LLM from Pypi + - name: Install IPEX-LLM from Pypi shell: bash run: | - pip install --pre --upgrade bigdl-llm[all] -f https://developer.intel.com/ipex-whl-stable-xpu + pip install --pre --upgrade ipex-llm[all] -f https://developer.intel.com/ipex-whl-stable-xpu test_version_date=`date -d 'yesterday' '+%Y%m%d'` - if ! pip show bigdl-llm | grep $test_version_date; then - echo "Did not install bigdl-llm with excepted version $test_version_date" + if ! 
pip show ipex-llm | grep $test_version_date; then + echo "Did not install ipex-llm with expected version $test_version_date" exit 1 fi @@ -316,19 +316,19 @@ jobs: steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # actions/checkout@v3 - # TODO: Put the bigdl-llm related install process for win gpu into a action function + # TODO: Put the ipex-llm related install process for win gpu into an action function # Please uncomment it and commment the install from pypi for PR tests # - name: Download llm binary # uses: ./.github/actions/llm/download-llm-binary - # - name: Prepare for install bigdl-llm from source + # - name: Prepare for install ipex-llm from source # shell: bash # run: | # sed -i 's/"bigdl-core-xe-21==" + VERSION + "/"bigdl-core-xe-21/g' python/llm/setup.py # sed -i 's/"bigdl-core-xe-21==" + VERSION/"bigdl-core-xe-21"/g' python/llm/setup.py - # - name: Install bigdl-llm and other related packages (install from source) + # - name: Install ipex-llm and other related packages (install from source) # shell: cmd # run: | # call conda create -n igpu-perf python=${{ matrix.python-version }} libuv -y @@ -341,8 +341,8 @@ jobs: # cd python\llm # python setup.py clean --all bdist_wheel --win - # if not exist dist\bigdl_llm*.whl (exit /b 1) - # for %%i in (dist\bigdl_llm*.whl) do set whl_name=%%i + # if not exist dist\ipex_llm*.whl (exit /b 1) + # for %%i in (dist\ipex_llm*.whl) do set whl_name=%%i # pip install --pre --upgrade %whl_name%[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu # if %ERRORLEVEL% neq 0 (exit /b 1) @@ -350,13 +350,13 @@ jobs: # call conda deactivate - - name: Determine desired bigdl-llm version + - name: Determine desired ipex-llm version shell: bash run: | test_version_date=`date -d 'yesterday' '+%Y%m%d'` echo "TEST_VERSION_DATE=${test_version_date}" >> "$GITHUB_ENV" - - name: Install bigdl-llm and other related packages (install from pypi) + - name: Install ipex-llm and other related packages (install from pypi) 
shell: cmd run: | call conda create -n igpu-perf python=${{ matrix.python-version }} libuv -y @@ -367,10 +367,10 @@ jobs: pip install --upgrade omegaconf pandas pip install --upgrade tiktoken einops transformers_stream_generator - pip install --pre --upgrade bigdl-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu - pip show bigdl-llm | findstr %TEST_VERSION_DATE% + pip install --pre --upgrade ipex-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu + pip show ipex-llm | findstr %TEST_VERSION_DATE% if %ERRORLEVEL% neq 0 ( - echo "Did not install bigdl-llm with excepted version %TEST_VERSION_DATE%" + echo "Did not install ipex-llm with expected version %TEST_VERSION_DATE%" exit /b 1 ) pip list