Deprecate support for pytorch 2.0 on Linux for ipex-llm >= 2.1.0b20240511 (#10986)
* Remove xpu_2.0 option in setup.py
* Disable xpu_2.0 test in UT and nightly
* Update docs for deprecated pytorch 2.0
* Small doc update
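In practical terms, the change leaves two Linux install paths: PyTorch 2.1 remains the default, while PyTorch 2.0 users must pin the last build that still ships the `xpu_2.0` extra. A sketch assembled from the doc changes below (US index shown; the CN mirror commands change in the same way):

```bash
# PyTorch 2.1 (still supported): unchanged
pip install --pre --upgrade ipex-llm[xpu_2.1] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/

# PyTorch 2.0: pin to the last build published before the deprecation
pip install --pre --upgrade ipex-llm[xpu_2.0]==2.1.0b20240510 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
```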
Parent: 5e0872073e
Commit: 9f6358e4c2

3 changed files with 15 additions and 20 deletions
.github/workflows/llm_unit_tests.yml (vendored): 3 changes
@@ -239,7 +239,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pytorch-version: ['2.1', '2.0']
+        # pytorch-version: ['2.1', '2.0']
+        pytorch-version: ['2.1']
         python-version: ${{ fromJson(needs.setup-python-version.outputs.python-version) }}
     runs-on: [self-hosted, llm, arc-ut]
     env:
@@ -187,6 +187,10 @@ IPEX-LLM GPU support on Linux has been verified on:
 .. important::
 
    IPEX-LLM on Linux supports PyTorch 2.0 and PyTorch 2.1.
+
+.. warning::
+
+   IPEX-LLM support for Pytorch 2.0 is deprecated as of ``ipex-llm >= 2.1.0b20240511``.
 ```
 
 ```eval_rst
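Whether an existing environment is affected by this deprecation can be checked with a standard pip query before upgrading; a minimal example (plain pip, nothing added by this commit):

```bash
# print the installed ipex-llm build; 2.1.0b20240511 and newer drop PyTorch 2.0 support on Linux
pip show ipex-llm | grep -i "^version"
```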
@@ -311,7 +315,7 @@ IPEX-LLM GPU support on Linux has been verified on:
             cd /opt/intel/oneapi/installer
             sudo ./installer
 
-   .. tab:: PyTorch 2.0
+   .. tab:: PyTorch 2.0 (deprecated for versions ``ipex-llm >= 2.1.0b20240511``)
 
       To enable IPEX-LLM for Intel GPUs with PyTorch 2.0, here're several prerequisite steps for tools installation and environment preparation:
 
@@ -432,7 +436,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t
 ```eval_rst
 .. tabs::
    .. tab:: PyTorch 2.1
-      Choose either US or CN website for `extra-index-url`:
+      Choose either US or CN website for ``extra-index-url``:
 
       .. tabs::
          .. tab:: US
@@ -470,8 +474,8 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t
                pip install --pre --upgrade ipex-llm[xpu_2.1] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
 
-   .. tab:: PyTorch 2.0
-      Choose either US or CN website for `extra-index-url`:
+   .. tab:: PyTorch 2.0 (deprecated for versions ``ipex-llm >= 2.1.0b20240511``)
+      Choose either US or CN website for ``extra-index-url``:
 
       .. tabs::
          .. tab:: US
@@ -481,7 +485,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t
                conda create -n llm python=3.11
                conda activate llm
 
-               pip install --pre --upgrade ipex-llm[xpu_2.0] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
+               pip install --pre --upgrade ipex-llm[xpu_2.0]==2.1.0b20240510 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
          .. tab:: CN
 
@@ -490,7 +494,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t
                conda create -n llm python=3.11
                conda activate llm
 
-               pip install --pre --upgrade ipex-llm[xpu_2.0] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
+               pip install --pre --upgrade ipex-llm[xpu_2.0]==2.1.0b20240510 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
 
 ```
@@ -521,7 +525,7 @@ If you encounter network issues when installing IPEX, you can also install IPEX-
            # install ipex-llm for Intel GPU
            pip install --pre --upgrade ipex-llm[xpu]
 
-   .. tab:: PyTorch 2.0
+   .. tab:: PyTorch 2.0 (deprecated for versions ``ipex-llm >= 2.1.0b20240511``)
 
       .. code-block:: bash
 
@@ -540,7 +544,7 @@ If you encounter network issues when installing IPEX, you can also install IPEX-
            pip install intel_extension_for_pytorch-2.0.110+xpu-cp311-cp311-linux_x86_64.whl
 
            # install ipex-llm for Intel GPU
-           pip install --pre --upgrade ipex-llm[xpu_2.0]
+           pip install --pre --upgrade ipex-llm[xpu_2.0]==2.1.0b20240510
 
 ```
@@ -285,15 +285,6 @@ def setup_package():
 
-    # Linux install with --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
-    xpu_20_requires = copy.deepcopy(all_requires)
-    xpu_20_requires.remove('torch')
-    # xpu_20 only works for linux now
-    xpu_20_requires += ["torch==2.0.1a0;platform_system=='Linux'",
-                        "torchvision==0.15.2a0;platform_system=='Linux'",
-                        "intel_extension_for_pytorch==2.0.110+xpu;platform_system=='Linux'",
-                        "bigdl-core-xe==" + CORE_XE_VERSION + ";platform_system=='Linux'",
-                        "bigdl-core-xe-esimd==" + CORE_XE_VERSION + ";platform_system=='Linux'"]
 
     xpu_21_requires = copy.deepcopy(all_requires)
     xpu_21_requires.remove('torch')
     xpu_21_requires += ["torch==2.1.0a0",
@@ -330,7 +321,6 @@ def setup_package():
         },
         extras_require={"all": all_requires,
                         "xpu": xpu_requires, # default to ipex 2.1 for linux and windows
-                        "xpu-2-0": xpu_20_requires,
                         "xpu-2-1": xpu_21_requires,
                         "serving": serving_requires,
                         "cpp": ["bigdl-core-cpp==" + CORE_XE_VERSION],
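A note on the setup.py change above: pip normalizes extra names, so the `xpu-2-0` key removed here is the same extra the docs spell `xpu_2.0`. From 2.1.0b20240511 onward that extra simply no longer exists, so requesting it would install ipex-llm without the PyTorch 2.0 torch/IPEX pins (pip typically just warns about an unknown extra). The supported route is pinning the earlier build, as the doc changes do; a hedged sketch, not output reproduced from this commit:

```bash
# ipex-llm >= 2.1.0b20240511 no longer declares the xpu_2.0 / xpu-2-0 extra on Linux;
# to keep a PyTorch 2.0 stack, pin the previous build explicitly:
pip install --pre --upgrade "ipex-llm[xpu_2.0]==2.1.0b20240510" --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
```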