[LLM] Add support for PyTorch 2.1 install in UT for GPU (#9845)
* Add support for IPEX 2.1 install in UT and fix perf test
* Small fix
parent 16433dd959
commit f25d23dfbf
2 changed files with 16 additions and 9 deletions
.github/actions/llm/setup-llm-env/action.yml (vendored): 21 changes
@@ -11,12 +11,16 @@ runs:
     - name: Create conda env for llm tests and conduct install tests
       shell: bash
       run: |
-        # make sure we install the latest version for bigdl-core-xe
-        pip uninstall bigdl-core-xe || true
+        # make sure we install the latest version for bigdl-core-xe related packages
+        pip uninstall bigdl-core-xe -y || true
+        pip uninstall bigdl-core-xe-esimd -y || true
+        pip uninstall bigdl-core-xe-21 -y || true
+        pip uninstall bigdl-core-xe-esimd-21 -y || true
         sed -i 's/"bigdl-core-xe==" + VERSION + "/"bigdl-core-xe/g' python/llm/setup.py
-        # make sure we install the latest version for bigdl-core-xe-esimd
-        pip uninstall bigdl-core-xe-esimd || true
         sed -i 's/"bigdl-core-xe-esimd==" + VERSION + "/"bigdl-core-xe-esimd/g' python/llm/setup.py
+        sed -i 's/"bigdl-core-xe-21==" + VERSION + "/"bigdl-core-xe-21/g' python/llm/setup.py
+        sed -i 's/"bigdl-core-xe-21==" + VERSION/"bigdl-core-xe-21"/g' python/llm/setup.py
+        sed -i 's/"bigdl-core-xe-esimd-21==" + VERSION + "/"bigdl-core-xe-esimd/g' python/llm/setup.py

         pip install requests
         if [[ ${{ runner.os }} == 'Linux' ]]; then
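The sed rewrites above only matter because python/llm/setup.py appears to pin the bigdl-core-xe* requirements to the package's own VERSION; stripping the pin is what lets the CI run install the latest published core wheels, as the new comment says. A minimal sketch of the effect, using a hypothetical one-line setup.py fragment (the file path and the variable name all_requires are illustrative, not from the repo):

# hypothetical fragment standing in for the pinned requirement in setup.py
printf '%s\n' 'all_requires += ["bigdl-core-xe-21==" + VERSION]' > /tmp/setup_fragment.py
# strip the version pin, exactly as the bare-VERSION rule above does
sed -i 's/"bigdl-core-xe-21==" + VERSION/"bigdl-core-xe-21"/g' /tmp/setup_fragment.py
cat /tmp/setup_fragment.py    # -> all_requires += ["bigdl-core-xe-21"]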
@@ -28,9 +32,12 @@ runs:
           exit 1
         fi
         whl_name=$(ls python/llm/dist)
-        if [[ ${{ inputs.extra-dependency }} == 'xpu' ]]; then
-          pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[xpu]" -f https://developer.intel.com/ipex-whl-stable-xpu
-          pip install pytest datasets librosa soundfile
+        if [[ ${{ inputs.extra-dependency }} == 'xpu_2.0' ]]; then
+          pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[xpu_2.0]" -f https://developer.intel.com/ipex-whl-stable-xpu
+          pip install pytest expecttest
+        elif [[ ${{ inputs.extra-dependency }} == 'xpu_2.1' ]]; then
+          pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[xpu_2.1]" -f https://developer.intel.com/ipex-whl-stable-xpu
+          pip install pytest expecttest
         else
           pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall "python/llm/dist/${whl_name}[all]"
           pip install pytest
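Taken together, the action now treats the former 'xpu' option as 'xpu_2.0' and adds a parallel 'xpu_2.1' branch, forwarding the extra name to pip so the wheel's matching extras pull in the corresponding IPEX/PyTorch stack. A hedged sketch of running the new branch by hand against a locally built wheel (paths and extras behaviour assumed to match the action above):

# pick up the wheel that the build step left in python/llm/dist
whl_name=$(ls python/llm/dist)
# IPEX/PyTorch 2.1 variant; use [xpu_2.0] instead for the 2.0 stack
pip install --upgrade --pre -i https://pypi.python.org/simple --force-reinstall \
  "python/llm/dist/${whl_name}[xpu_2.1]" -f https://developer.intel.com/ipex-whl-stable-xpu
pip install pytest expecttest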
.github/workflows/llm_unit_tests.yml (vendored): 4 changes
@@ -249,7 +249,7 @@ jobs:
       - name: Run LLM install (all) test
         uses: ./.github/actions/llm/setup-llm-env
         with:
-          extra-dependency: "xpu"
+          extra-dependency: "xpu_2.0"

       - name: Test installed xpu version
         shell: bash
@@ -296,7 +296,7 @@ jobs:
         shell: bash
         run: |
           source /opt/intel/oneapi/setvars.sh
-          python -m pip install expecttest einops librosa
+          python -m pip install datasets librosa soundfile einops
           bash python/llm/test/run-llm-inference-tests-gpu.sh

       - name: Run LLM example tests
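For the GPU inference job itself, datasets, librosa and soundfile move out of the env-setup action and into this step, alongside einops, while expecttest moves the other way. A local reproduction of the updated step, assuming the standard oneAPI environment script location used in the workflow:

# set up the Intel oneAPI environment (same path as the workflow step)
source /opt/intel/oneapi/setvars.sh
# test-only dependencies now installed here rather than in the setup action
python -m pip install datasets librosa soundfile einops
bash python/llm/test/run-llm-inference-tests-gpu.sh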