Upgrade glm-4 example transformers version (#11659)
* upgrade glm-4 example transformers version
* move pip install in one line
This commit is contained in:
parent a44ab32153
commit 6e3ce28173

4 changed files with 14 additions and 14 deletions
First changed file (CPU GLM-4 example README):

@@ -17,8 +17,8 @@ conda activate llm
 # install the latest ipex-llm nightly build with 'all' option
 pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu
 
-# install tiktoken required for GLM-4
-pip install "tiktoken>=0.7.0"
+# install packages required for GLM-4
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 On Windows:
@@ -29,7 +29,7 @@ conda activate llm
 
 pip install --pre --upgrade ipex-llm[all]
 
-pip install "tiktoken>=0.7.0"
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 ## 2. Run
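For reference, a minimal sketch of how the CPU example is typically run once these pinned packages are installed. This is not part of the commit; the model id (`THUDM/glm-4-9b-chat`), the prompt, and the exact loader arguments are assumptions, not taken from the changed files:

```python
# Sketch (assumed usage): load GLM-4 with ipex-llm INT4 optimization on CPU.
import torch
from transformers import AutoTokenizer
from ipex_llm.transformers import AutoModel  # ipex-llm drop-in replacement for transformers' AutoModel

model_path = "THUDM/glm-4-9b-chat"  # assumed model id, adjust to your local path

tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModel.from_pretrained(
    model_path,
    load_in_4bit=True,       # ipex-llm INT4 quantization
    trust_remote_code=True,
)

# Build a chat-formatted prompt and generate a short reply.
inputs = tokenizer.apply_chat_template(
    [{"role": "user", "content": "What is AI?"}],
    add_generation_prompt=True,
    return_tensors="pt",
)
with torch.inference_mode():
    output = model.generate(inputs, max_new_tokens=32)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```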
Second changed file (CPU GLM-4 example README):

@@ -20,8 +20,8 @@ conda activate llm
 # install the latest ipex-llm nightly build with 'all' option
 pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu
 
-# install tiktoken required for GLM-4
-pip install "tiktoken>=0.7.0"
+# install packages required for GLM-4
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 On Windows:
@@ -32,7 +32,7 @@ conda activate llm
 
 pip install --pre --upgrade ipex-llm[all]
 
-pip install "tiktoken>=0.7.0"
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 ### 2. Run
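Both CPU READMEs now pin the same package set, so a quick interpreter check can confirm the environment matches what the updated install commands produce. This is an optional extra, not something the commit adds:

```python
# Optional sanity check: confirm the pinned packages are importable and report their versions.
import tiktoken
import transformers
import trl

print("transformers:", transformers.__version__)  # expected: 4.42.4 per the updated install line
print("tiktoken:", tiktoken.__version__)          # expected: >= 0.7.0
print("trl:", trl.__version__)
```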
Third changed file (GPU GLM-4 example README):

@@ -13,8 +13,8 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
-# install tiktoken required for GLM-4
-pip install "tiktoken>=0.7.0"
+# install packages required for GLM-4
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 ### 1.2 Installation on Windows
@@ -26,8 +26,8 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
-# install tiktoken required for GLM-4
-pip install "tiktoken>=0.7.0"
+# install packages required for GLM-4
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 ## 2. Configures OneAPI environment variables for Linux
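The GPU example typically follows the same pattern as the CPU one, with the model moved to the Intel GPU (`xpu`) after loading. The sketch below is an assumption about that usual flow, not code from the changed files; the model id and prompt are placeholders:

```python
# Sketch (assumed usage): load GLM-4 with ipex-llm INT4 optimization and run it on an Intel GPU.
import torch
from transformers import AutoTokenizer
from ipex_llm.transformers import AutoModel

model_path = "THUDM/glm-4-9b-chat"  # assumed model id

tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModel.from_pretrained(
    model_path,
    load_in_4bit=True,
    trust_remote_code=True,
).to("xpu")                          # move the quantized model to the Intel GPU

inputs = tokenizer.apply_chat_template(
    [{"role": "user", "content": "What is AI?"}],
    add_generation_prompt=True,
    return_tensors="pt",
).to("xpu")
with torch.inference_mode():
    output = model.generate(inputs, max_new_tokens=32)
print(tokenizer.decode(output[0].cpu(), skip_special_tokens=True))
```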
Fourth changed file (GPU GLM-4 example README):

@@ -15,8 +15,8 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
-# install tiktoken required for GLM-4
-pip install "tiktoken>=0.7.0"
+# install packages required for GLM-4
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 #### 1.2 Installation on Windows
@@ -28,8 +28,8 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
-# install tiktoken required for GLM-4
-pip install "tiktoken>=0.7.0"
+# install packages required for GLM-4
+pip install "tiktoken>=0.7.0" transformers==4.42.4 trl
 ```
 
 ### 2. Configures OneAPI environment variables for Linux
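On the GPU side, a short check (an assumption about typical usage, not something this commit touches) can verify that the XPU device is visible after installing `ipex-llm[xpu]` and configuring the OneAPI environment mentioned in the context lines:

```python
# Optional sanity check: the Intel GPU should be visible to PyTorch via intel_extension_for_pytorch.
import torch
import intel_extension_for_pytorch as ipex  # registers the 'xpu' device with PyTorch

print("IPEX:", ipex.__version__)                 # expected: 2.1.10+xpu per the README
print("XPU available:", torch.xpu.is_available())
```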