From f25d23dfbff9ef7100b6e8c17ba1280040bcd0cb Mon Sep 17 00:00:00 2001 From: Yuwen Hu <54161268+Oscilloscope98@users.noreply.github.com> Date: Fri, 5 Jan 2024 16:13:18 +0800 Subject: [PATCH] [LLM] Add support for PyTorch 2.1 install in UT for GPU (#9845) * Add support for ipex 2.1 install in UT and fix perf test * Small fix --- .github/actions/llm/setup-llm-env/action.yml | 21 +++++++++++++------- .github/workflows/llm_unit_tests.yml | 4 ++-- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/.github/actions/llm/setup-llm-env/action.yml b/.github/actions/llm/setup-llm-env/action.yml index 4a9fb201..9764a21b 100644 --- a/.github/actions/llm/setup-llm-env/action.yml +++ b/.github/actions/llm/setup-llm-env/action.yml @@ -11,12 +11,16 @@ runs: - name: Create conda env for llm tests and conduct install tests shell: bash run: | - # make sure we install the latest version for bigdl-core-xe - pip uninstall bigdl-core-xe || true + # make sure we install the latest version for bigdl-core-xe related packages + pip uninstall bigdl-core-xe -y || true + pip uninstall bigdl-core-xe-esimd -y || true + pip uninstall bigdl-core-xe-21 -y || true + pip uninstall bigdl-core-xe-esimd-21 -y || true sed -i 's/"bigdl-core-xe==" + VERSION + "/"bigdl-core-xe/g' python/llm/setup.py - # make sure we install the latest version for bigdl-core-xe-esimd - pip uninstall bigdl-core-xe-esimd || true sed -i 's/"bigdl-core-xe-esimd==" + VERSION + "/"bigdl-core-xe-esimd/g' python/llm/setup.py + sed -i 's/"bigdl-core-xe-21==" + VERSION + "/"bigdl-core-xe-21/g' python/llm/setup.py + sed -i 's/"bigdl-core-xe-21==" + VERSION/"bigdl-core-xe-21"/g' python/llm/setup.py + sed -i 's/"bigdl-core-xe-esimd-21==" + VERSION + "/"bigdl-core-xe-esimd-21/g' python/llm/setup.py pip install requests if [[ ${{ runner.os }} == 'Linux' ]]; then @@ -28,9 +32,12 @@ runs: exit 1 fi whl_name=$(ls python/llm/dist) - if [[ ${{ inputs.extra-dependency }} == 'xpu' ]]; then - pip install --upgrade --pre -i 
https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[xpu]" -f https://developer.intel.com/ipex-whl-stable-xpu - pip install pytest datasets librosa soundfile + if [[ ${{ inputs.extra-dependency }} == 'xpu_2.0' ]]; then + pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[xpu_2.0]" -f https://developer.intel.com/ipex-whl-stable-xpu + pip install pytest expecttest + elif [[ ${{ inputs.extra-dependency }} == 'xpu_2.1' ]]; then + pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[xpu_2.1]" -f https://developer.intel.com/ipex-whl-stable-xpu + pip install pytest expecttest else pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[all]" pip install pytest diff --git a/.github/workflows/llm_unit_tests.yml b/.github/workflows/llm_unit_tests.yml index 407da10e..bbecb097 100644 --- a/.github/workflows/llm_unit_tests.yml +++ b/.github/workflows/llm_unit_tests.yml @@ -249,7 +249,7 @@ jobs: - name: Run LLM install (all) test uses: ./.github/actions/llm/setup-llm-env with: - extra-dependency: "xpu" + extra-dependency: "xpu_2.0" - name: Test installed xpu version shell: bash @@ -296,7 +296,7 @@ jobs: shell: bash run: | source /opt/intel/oneapi/setvars.sh - python -m pip install expecttest einops librosa + python -m pip install datasets librosa soundfile einops bash python/llm/test/run-llm-inference-tests-gpu.sh - name: Run LLM example tests