diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codegeex2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codegeex2/README.md
index 91a0a883..8f94bfbc 100644
--- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codegeex2/README.md
+++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codegeex2/README.md
@@ -18,6 +18,7 @@ conda activate llm
 
 # install the latest ipex-llm nightly build with 'all' option
 pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu
+pip install transformers==4.31.0
 ```
 
 On Windows:
@@ -27,6 +28,7 @@ conda create -n llm python=3.11
 conda activate llm
 
 pip install --pre --upgrade ipex-llm[all]
+pip install transformers==4.31.0
 ```
 
 ### 2. Run
diff --git a/python/llm/example/CPU/PyTorch-Models/Model/codegeex2/README.md b/python/llm/example/CPU/PyTorch-Models/Model/codegeex2/README.md
index 91a0a883..8f94bfbc 100644
--- a/python/llm/example/CPU/PyTorch-Models/Model/codegeex2/README.md
+++ b/python/llm/example/CPU/PyTorch-Models/Model/codegeex2/README.md
@@ -18,6 +18,7 @@ conda activate llm
 
 # install the latest ipex-llm nightly build with 'all' option
 pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu
+pip install transformers==4.31.0
 ```
 
 On Windows:
@@ -27,6 +28,7 @@ conda create -n llm python=3.11
 conda activate llm
 
 pip install --pre --upgrade ipex-llm[all]
+pip install transformers==4.31.0
 ```
 
 ### 2. Run
diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codegeex2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codegeex2/README.md
index bc8cfa62..37f801a2 100644
--- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codegeex2/README.md
+++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codegeex2/README.md
@@ -16,6 +16,7 @@ conda create -n llm python=3.11
 conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
+pip install transformers==4.31.0
 ```
 
 #### 1.2 Installation on Windows
@@ -26,6 +27,7 @@ conda activate llm
 
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
+pip install transformers==4.31.0
 ```
 
 ### 2. Configures OneAPI environment variables for Linux
diff --git a/python/llm/example/GPU/PyTorch-Models/Model/codegeex2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/codegeex2/README.md
index bc8cfa62..37f801a2 100644
--- a/python/llm/example/GPU/PyTorch-Models/Model/codegeex2/README.md
+++ b/python/llm/example/GPU/PyTorch-Models/Model/codegeex2/README.md
@@ -16,6 +16,7 @@ conda create -n llm python=3.11
 conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
+pip install transformers==4.31.0
 ```
 
 #### 1.2 Installation on Windows
@@ -26,6 +27,7 @@ conda activate llm
 
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
+pip install transformers==4.31.0
 ```
 
 ### 2. Configures OneAPI environment variables for Linux