Remove accelerate 0.23.0 install command in readme and docker (#11333)

ipex-llm's accelerate dependency has been upgraded to 0.23.0, so the separate `accelerate==0.23.0` install command is removed from the README and Docker files.
Qiyuan Gong 2024-06-17 17:52:12 +08:00 committed by GitHub
parent ef4b6519fb
commit de4bb97b4f
17 changed files with 2 additions and 17 deletions
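Since accelerate 0.23.0 is now pulled in by ipex-llm itself, the simplified flow no longer pins it explicitly. A minimal sketch of the idea, assuming the XPU install path from the affected READMEs (the environment name and the trailing version check are illustrative, not part of the official instructions):

```bash
# Create and activate an environment (name is illustrative).
conda create -n llm python=3.11
conda activate llm
# Installing ipex-llm is assumed to resolve accelerate 0.23.0 as a dependency,
# so no separate `pip install accelerate==0.23.0` line is needed anymore.
pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
# Sanity-check which accelerate version was actually installed.
python -c "import accelerate; print(accelerate.__version__)"
```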


@@ -381,7 +381,7 @@ jobs:
 shell: bash
 run: |
 python -m pip uninstall datasets -y
-python -m pip install transformers==4.36.0 datasets peft==0.10.0 accelerate==0.23.0
+python -m pip install transformers==4.36.0 datasets peft==0.10.0
 python -m pip install bitsandbytes scipy
 # Specific oneapi position on arc ut test machines
 if [[ "$RUNNER_OS" == "Linux" ]]; then


@@ -50,7 +50,6 @@ RUN mkdir -p /ipex_llm/data && mkdir -p /ipex_llm/model && \
 # install huggingface dependencies
 pip install datasets transformers==4.36.0 && \
 pip install fire peft==0.10.0 && \
-pip install accelerate==0.23.0 && \
 pip install bitsandbytes && \
 # get qlora example code
 cd /ipex_llm && \


@@ -63,7 +63,6 @@ RUN mkdir -p /ipex_llm/data && mkdir -p /ipex_llm/model && \
 # install huggingface dependencies
 pip install datasets transformers==4.36.0 && \
 pip install fire peft==0.10.0 && \
-pip install accelerate==0.23.0 && \
 # install basic dependencies
 apt-get update && apt-get install -y curl wget gpg gpg-agent && \
 # Install Intel oneAPI keys.


@@ -41,7 +41,7 @@ RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRO
 rm -rf IPEX-LLM && \
 # install transformers & peft dependencies
 pip install transformers==4.36.0 && \
-pip install peft==0.10.0 datasets accelerate==0.23.0 && \
+pip install peft==0.10.0 datasets && \
 pip install bitsandbytes scipy fire && \
 # Prepare accelerate config
 mkdir -p /root/.cache/huggingface/accelerate && \


@@ -216,7 +216,6 @@ pip install -e .
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 # install transformers etc
-pip install accelerate==0.23.0
 # to avoid https://github.com/OpenAccess-AI-Collective/axolotl/issues/1544
 pip install datasets==2.15.0
 pip install transformers==4.37.0


@@ -22,7 +22,6 @@ pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pyt
 pip install transformers==4.36.0
 pip install peft==0.10.0
 pip install datasets
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -10,7 +10,6 @@ conda activate llm
 pip install --pre --upgrade ipex-llm[all]
 pip install datasets transformers==4.36.0
 pip install fire peft==0.10.0
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -19,7 +19,6 @@ conda activate llm
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 pip install transformers==4.36.0 datasets
 pip install trl peft==0.10.0
-pip install accelerate==0.23.0
 pip install bitsandbytes
 ```


@@ -17,7 +17,6 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte
 pip install transformers==4.36.0 datasets
 pip install fire peft==0.10.0
 pip install oneccl_bind_pt==2.1.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ # necessary to run distributed finetuning
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -13,7 +13,6 @@ conda create -n llm python=3.11
 conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
-pip install accelerate==0.23.0
 pip install bitsandbytes==0.43.0
 pip install datasets==2.18.0
 pip install --upgrade transformers==4.36.0


@@ -15,7 +15,6 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte
 pip install transformers==4.36.0 datasets
 pip install fire peft==0.10.0
 pip install oneccl_bind_pt==2.1.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ # necessary to run distributed finetuning
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -15,7 +15,6 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte
 pip install transformers==4.36.0 datasets
 pip install fire peft==0.10.0
 pip install oneccl_bind_pt==2.1.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ # necessary to run distributed finetuning
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -18,7 +18,6 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte
 pip install transformers==4.36.0 datasets
 pip install fire peft==0.10.0
 pip install oneccl_bind_pt==2.1.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ # necessary to run distributed finetuning
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 # configures OneAPI environment variables
 source /opt/intel/oneapi/setvars.sh # necessary to run before installing deepspeed


@@ -19,7 +19,6 @@ conda activate llm
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 pip install transformers==4.36.0 datasets
 pip install peft==0.10.0
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -19,7 +19,6 @@ conda activate llm
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 pip install transformers==4.36.0 datasets
 pip install peft==0.10.0
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy trl
 ```


@@ -15,7 +15,6 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte
 pip install transformers==4.36.0 datasets
 pip install fire peft==0.10.0
 pip install oneccl_bind_pt==2.1.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ # necessary to run distributed finetuning
-pip install accelerate==0.23.0
 pip install bitsandbytes scipy
 ```


@@ -132,7 +132,6 @@ pip install -e .
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 # install transformers etc
-pip install accelerate==0.23.0
 # to avoid https://github.com/OpenAccess-AI-Collective/axolotl/issues/1544
 pip install datasets==2.15.0
 pip install transformers==4.37.0