From ce6fcaa9baaf163cf1d9c2056ddd82e18f166c60 Mon Sep 17 00:00:00 2001 From: "Chu,Youcheng" <1340390339@qq.com> Date: Wed, 27 Nov 2024 15:02:25 +0800 Subject: [PATCH] update transformers version in example of glm4 (#12453) * fix: update transformers version in example of glm4 * fix: textual adjustments * fix: textual adjustment --- .../CPU/HF-Transformers-AutoModels/Model/glm4/README.md | 4 ++-- python/llm/example/GPU/HuggingFace/LLM/glm4/README.md | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md index cb0b20d7..01ca417f 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md @@ -17,8 +17,8 @@ conda activate llm # install the latest ipex-llm nightly build with 'all' option pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu -# install packages required for GLM-4 -pip install "tiktoken>=0.7.0" transformers==4.42.4 "trl<0.12.0" +# install packages required for GLM-4, it is recommended to use transformers>=4.44 for THUDM/glm-4-9b-chat updated after August 12, 2024 +pip install "tiktoken>=0.7.0" transformers==4.44 "trl<0.12.0" ``` On Windows: diff --git a/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md b/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md index 9cf550c2..aa985dd7 100644 --- a/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md +++ b/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md @@ -13,8 +13,8 @@ conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ -# install packages required for GLM-4 -pip install "tiktoken>=0.7.0" transformers==4.42.4 "trl<0.12.0" +# 
install packages required for GLM-4, it is recommended to use transformers>=4.44 for THUDM/glm-4-9b-chat updated after August 12, 2024 +pip install "tiktoken>=0.7.0" transformers==4.44 "trl<0.12.0" ``` ### 1.2 Installation on Windows @@ -26,8 +26,8 @@ conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ -# install packages required for GLM-4 -pip install "tiktoken>=0.7.0" transformers==4.42.4 "trl<0.12.0" +# install packages required for GLM-4, it is recommended to use transformers>=4.44 for THUDM/glm-4-9b-chat updated after August 12, 2024 +pip install "tiktoken>=0.7.0" transformers==4.44 "trl<0.12.0" ``` ## 2. Configures OneAPI environment variables for Linux