LLM: fix installation of codellama (#9813)
This commit is contained in:
parent
4d01069302
commit
6584539c91
4 changed files with 4 additions and 0 deletions
@@ -13,6 +13,7 @@ conda create -n llm python=3.9
 conda activate llm

 pip install bigdl-llm[all] # install bigdl-llm with 'all' option
+pip install transformers==4.34.1 # CodeLlamaTokenizer is supported in higher version of transformers
 ```

 ### 2. Run
@@ -15,6 +15,7 @@ conda create -n llm python=3.9 # recommend to use Python 3.9
 conda activate llm

 pip install --pre --upgrade bigdl-llm[all] # install the latest bigdl-llm nightly build with 'all' option
+pip install transformers==4.34.1 # CodeLlamaTokenizer is supported in higher version of transformers
 ```

 ### 2. Run
@@ -14,6 +14,7 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.0.110+xpu as default
 # you can install specific ipex/torch version for your need
 pip install --pre --upgrade bigdl-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu
+pip install transformers==4.34.1 # CodeLlamaTokenizer is supported in higher version of transformers
 ```

 ### 2. Configures OneAPI environment variables
@@ -17,6 +17,7 @@ conda activate llm
 # below command will install intel_extension_for_pytorch==2.0.110+xpu as default
 # you can install specific ipex/torch version for your need
 pip install --pre --upgrade bigdl-llm[xpu] -f https://developer.intel.com/ipex-whl-stable-xpu
+pip install transformers==4.34.1 # CodeLlamaTokenizer is supported in higher version of transformers
 ```

 ### 2. Configures OneAPI environment variables
Loading…
Reference in a new issue