diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md
index 09172bcb..9f79516b 100644
--- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md
+++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md
@@ -20,6 +20,7 @@ conda create -n llm python=3.11 # recommend to use Python 3.11
 conda activate llm
 
 pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option
+pip install "transformers<4.34.1" # chatglm cannot work with transformers 4.34.1+
 ```
 
 ### 2. Run
diff --git a/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md b/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md
index be040a03..bd5a3167 100644
--- a/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md
+++ b/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md
@@ -15,6 +15,7 @@ conda create -n llm python=3.11 # recommend to use Python 3.11
 conda activate llm
 
 pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option
+pip install "transformers<4.34.1" # chatglm cannot work with transformers 4.34.1+
 ```
 
 ### 2. Run
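For reference, below is a minimal sketch (not part of the diff) of how the pinned constraint could be checked at runtime before running the chatglm examples. It assumes the `packaging` package is importable in the same environment as `transformers`; the error message text is illustrative only.

```python
# Minimal sketch: verify the installed transformers version satisfies the
# constraint added in this diff (transformers < 4.34.1).
# Assumes `packaging` is available in the environment; this helper is an
# illustration, not part of the ipex-llm examples themselves.
from importlib.metadata import version
from packaging.version import Version

installed = Version(version("transformers"))
if installed >= Version("4.34.1"):
    raise RuntimeError(
        f"transformers {installed} found; the chatglm examples require transformers<4.34.1 "
        '(reinstall with: pip install "transformers<4.34.1")'
    )
print(f"transformers {installed} satisfies the chatglm pin (<4.34.1)")
```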