diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md
index cb0b20d7..01ca417f 100644
--- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md
+++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/glm4/README.md
@@ -17,8 +17,8 @@ conda activate llm
 # install the latest ipex-llm nightly build with 'all' option
 pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu
 
-# install packages required for GLM-4
-pip install "tiktoken>=0.7.0" transformers==4.42.4 "trl<0.12.0"
+# install packages required for GLM-4, it is recommended to use transformers>=4.44 for THUDM/glm-4-9b-chat updated after August 12, 2024
+pip install "tiktoken>=0.7.0" transformers==4.44 "trl<0.12.0"
 ```
 
 On Windows:
diff --git a/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md b/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md
index 9cf550c2..aa985dd7 100644
--- a/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md
+++ b/python/llm/example/GPU/HuggingFace/LLM/glm4/README.md
@@ -13,8 +13,8 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
-# install packages required for GLM-4
-pip install "tiktoken>=0.7.0" transformers==4.42.4 "trl<0.12.0"
+# install packages required for GLM-4, it is recommended to use transformers>=4.44 for THUDM/glm-4-9b-chat updated after August 12, 2024
+pip install "tiktoken>=0.7.0" transformers==4.44 "trl<0.12.0"
 ```
 
 ### 1.2 Installation on Windows
@@ -26,8 +26,8 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 
-# install packages required for GLM-4
-pip install "tiktoken>=0.7.0" transformers==4.42.4 "trl<0.12.0"
+# install packages required for GLM-4, it is recommended to use transformers>=4.44 for THUDM/glm-4-9b-chat updated after August 12, 2024
+pip install "tiktoken>=0.7.0" transformers==4.44 "trl<0.12.0"
 ```
 
 ## 2. Configures OneAPI environment variables for Linux