Fix DPO finetuning example (#12313)
parent 05c5d0267a
commit 126f95be80

2 changed files with 5 additions and 4 deletions
@@ -17,8 +17,9 @@ conda create -n llm python=3.11
 conda activate llm
 # below command will install intel_extension_for_pytorch==2.1.10+xpu as default
 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
-pip install transformers==4.36.0 datasets
-pip install trl peft==0.10.0
+pip install datasets
+pip install peft==0.10.0
+pip install 'trl<0.9'
 # Note, if you don't want to reinstall BNBs dependencies, append the `--no-deps` flag!
 pip install --no-deps --force-reinstall 'https://github.com/bitsandbytes-foundation/bitsandbytes/releases/download/continuous-release_multi-backend-refactor/bitsandbytes-0.44.1.dev0-py3-none-manylinux_2_24_x86_64.whl'
 ```
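After following the updated install commands, a quick sanity check (illustrative only, not part of this commit) is to print the versions pip actually resolved and confirm peft stayed at 0.10.0 and trl landed below 0.9:

```python
# Illustrative sanity check, not part of the commit: print the versions the
# updated pip commands resolved.
import importlib.metadata as metadata

for pkg in ("ipex-llm", "transformers", "datasets", "peft", "trl"):
    print(pkg, metadata.version(pkg))
# Expect peft 0.10.0 and a trl release below 0.9 (e.g. 0.8.x).
```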
@@ -34,12 +34,12 @@
 import os
 import torch
 
-from ipex_llm.transformers.qlora import get_peft_model, prepare_model_for_kbit_training
-from ipex_llm.transformers import AutoModelForCausalLM
 import transformers
 from transformers import AutoTokenizer, TrainingArguments, BitsAndBytesConfig
 from datasets import load_dataset
 from peft import LoraConfig
+from ipex_llm.transformers.qlora import get_peft_model, prepare_model_for_kbit_training
+from ipex_llm.transformers import AutoModelForCausalLM
 from trl import DPOTrainer
 import argparse
 
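For context, the sketch below shows how the reordered imports are meant to compose: the base model is loaded in 4-bit through ipex_llm's AutoModelForCausalLM, wrapped with ipex_llm's QLoRA helpers in place of peft's, and handed to trl's DPOTrainer. The model name, dataset, LoRA settings, and trainer arguments are illustrative assumptions, not values from the commit; with trl pinned below 0.9, DPOTrainer still accepts plain TrainingArguments plus `beta`/`max_length` keywords (trl 0.9 moved these into DPOConfig, which is why the README pins `'trl<0.9'`).

```python
from transformers import AutoTokenizer, TrainingArguments
from datasets import load_dataset
from peft import LoraConfig
from ipex_llm.transformers.qlora import get_peft_model, prepare_model_for_kbit_training
from ipex_llm.transformers import AutoModelForCausalLM
from trl import DPOTrainer

base_model = "mistralai/Mistral-7B-v0.1"  # placeholder model, not from the commit

tokenizer = AutoTokenizer.from_pretrained(base_model)
tokenizer.pad_token = tokenizer.eos_token

# Load the base weights in 4-bit (nf4) via ipex_llm; assumes an Intel GPU
# (XPU device) is available.
model = AutoModelForCausalLM.from_pretrained(base_model,
                                             load_in_low_bit="nf4",
                                             optimize_model=False)
model = model.to("xpu")

# Use ipex_llm's QLoRA helpers instead of peft's, matching the imports above.
model = prepare_model_for_kbit_training(model)
peft_config = LoraConfig(r=16, lora_alpha=16, lora_dropout=0.05, bias="none",
                         target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
                         task_type="CAUSAL_LM")
model = get_peft_model(model, peft_config)

# DPO expects "prompt"/"chosen"/"rejected" columns; Intel/orca_dpo_pairs ships
# "system"/"question"/"chosen"/"rejected", so rename accordingly.
dataset = load_dataset("Intel/orca_dpo_pairs", split="train")
dataset = dataset.rename_column("question", "prompt").remove_columns(["system"])

trainer = DPOTrainer(
    model,
    ref_model=None,  # trl reuses the base weights with adapters disabled
    args=TrainingArguments(output_dir="dpo-output",
                           per_device_train_batch_size=1,
                           gradient_accumulation_steps=4,
                           max_steps=20,
                           learning_rate=2e-5,
                           remove_unused_columns=False),
    beta=0.1,  # pre-0.9 trl takes beta directly rather than via DPOConfig
    train_dataset=dataset,
    tokenizer=tokenizer,
    max_length=1024,
    max_prompt_length=512,
)
trainer.train()
```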