LLM: add ui for portable-zip (#9262)

parent 726203d778
commit 6a128aee32

5 changed files with 79 additions and 4 deletions

python/llm/portable-zip/.gitignore (vendored, 2 additions)

@@ -1,2 +1,4 @@
 python-embed
 bigdl-llm.zip
+*.log
+*.json

python/llm/portable-zip/README-ui.md (new file, 22 additions)

@@ -0,0 +1,22 @@
+# BigDL-LLM Portable Zip with Web-UI For Windows: User Guide
+
+## Introduction
+
+This portable zip includes everything you need to run an LLM with BigDL-LLM optimizations and chat with it in a Web-UI. Please refer to the [How to use](#how-to-use) section to get started.
+
+### 6B model running on an Intel 11-Gen Core PC (real-time screen capture)
+
+
+### Verified Models
+
+- ChatGLM2-6b
+
+## How to use
+
+1. Download the zip from the link [here]().
+2. (Optional) You can also build the zip on your own: run `setup.bat --ui` and it will generate the zip file.
+3. Unzip `bigdl-llm.zip`.
+4. Download the model to your computer.
+5. Go into the unzipped folder and double-click `chat-ui.bat`. Input the path of the model (e.g. `path\to\model`; note that there is no slash at the end of the path). Press Enter and wait until it shows `All service started. Visit 127.0.0.1:7860 in browser to chat.`. Do NOT close the terminal window!
+6. Visit `127.0.0.1:7860` in your browser and enjoy chatting!
+7. If you want to stop the program, just close the terminal window.
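For context on step 5: `chat-ui.bat` (added below) starts three backend processes with the bundled Python before the UI at `127.0.0.1:7860` becomes reachable. A minimal sketch of the equivalent manual launches, with `path\to\model` as a placeholder, would be:

:: The three services chat-ui.bat brings up, shown here as plain foreground commands.
:: In the actual script each one is started in the background via PowerShell,
:: with its output redirected to a log file that is polled for a readiness message.
.\python-embed\python.exe -m fastchat.serve.controller
.\python-embed\python.exe -m bigdl.llm.serving.model_worker --model-path path\to\model --device cpu
.\python-embed\python.exe -m fastchat.serve.gradio_web_server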
				

python/llm/portable-zip/chat-ui.bat (new file, 36 additions)

@@ -0,0 +1,36 @@
+@echo off
+set /p modelpath="Please enter the model path: "
+
+powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.controller > zip_controller.log 2>&1 }' -NoNewWindow"
+timeout /t 1 /nobreak >nul 2>&1
+:loop1
+powershell -Command "$output = Get-Content zip_controller.log; if($null -eq $output -or !($output | Select-String -Pattern 'Uvicorn running on')) { exit 1 } else { exit 0 }"
+if errorlevel 1 (
+    timeout /t 1 /nobreak >nul 2>&1
+    goto loop1
+)
+echo [1/3] Controller started successfully
+
+powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m bigdl.llm.serving.model_worker --model-path %modelpath% --device cpu > zip_model_worker.log 2>&1 }' -NoNewWindow"
+timeout /t 1 /nobreak >nul 2>&1
+:loop2
+powershell -Command "$output = Get-Content zip_model_worker.log; if($null -eq $output -or !($output | Select-String -Pattern 'Uvicorn running on')) { exit 1 } else { exit 0 }"
+if errorlevel 1 (
+    timeout /t 1 /nobreak >nul 2>&1
+    goto loop2
+)
+echo [2/3] Model worker started successfully
+
+powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.gradio_web_server > zip_web_server.log 2>&1 }' -NoNewWindow"
+timeout /t 1 /nobreak >nul 2>&1
+:loop3
+powershell -Command "$output = Get-Content zip_web_server.log; if($null -eq $output -or !($output | Select-String -Pattern 'Running on local URL')) { exit 1 } else { exit 0 }"
+if errorlevel 1 (
+    timeout /t 1 /nobreak >nul 2>&1
+    goto loop3
+)
+echo [3/3] Web server started successfully
+
+echo All service started. Visit 127.0.0.1:7860 in browser to chat.
+
+timeout /t -1 /nobreak >nul 2>&1
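The script above has no inline comments. The pattern it repeats three times is sketched below with comments, reduced to a single hypothetical service (the module name `some.module`, log file `service.log`, and readiness string `ready marker` are placeholders, not names from the commit):

@echo off
:: Launch the service in the background through PowerShell, sending stdout and stderr to a log file.
powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m some.module > service.log 2>&1 }' -NoNewWindow"
:wait
:: Check the log for the readiness string; exit code 1 means it has not appeared yet.
powershell -Command "$output = Get-Content service.log; if($null -eq $output -or !($output | Select-String -Pattern 'ready marker')) { exit 1 } else { exit 0 }"
if errorlevel 1 (
    REM Not ready yet: wait one second and poll again.
    timeout /t 1 /nobreak >nul 2>&1
    goto wait
)
echo Service started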

python/llm/portable-zip/setup.bat

@@ -17,7 +17,16 @@ powershell -Command "(gc python39._pth) -replace '%search%', '%replace%' | Out-F
 cd ..
 
 :: install pip packages
-%python-embed% -m pip install bigdl-llm[all] transformers_stream_generator tiktoken einops colorama
+%python-embed% -m pip install --pre --upgrade bigdl-llm[all]
+%python-embed% -m pip install transformers_stream_generator tiktoken einops colorama
+
+if "%1"=="--ui" (
+    %python-embed% -m pip install --pre --upgrade bigdl-llm[serving]
+)
 
 :: compress the python and scripts
-powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat.bat', '.\chat.py', '.\README.md' -DestinationPath .\bigdl-llm.zip"
+if "%1"=="--ui" (
+    powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat-ui.bat', '.\README.md' -DestinationPath .\bigdl-llm-ui.zip"
+) else (
+    powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat.bat', '.\chat.py', '.\README.md' -DestinationPath .\bigdl-llm.zip"
+)

python/llm/portable-zip/README.md

@@ -2,4 +2,10 @@
 
 # How to use
 
-Just simply run `setup.bat` and it will download and install all dependency and generate `bigdl-llm.zip` for user to use.
+## Build Portable Zip without Web-UI
+
+Run `setup.bat` to generate the portable zip without Web-UI. It will download and install all dependencies and generate `bigdl-llm.zip` for users to use.
+
+## Build Portable Zip with Web-UI
+
+Run `setup.bat --ui` to generate the portable zip with Web-UI. It will download and install all dependencies and generate `bigdl-llm-ui.zip` for users to use.
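Taken together with the setup.bat change above, the Web-UI flow described in README-ui.md can be exercised end to end roughly as follows (a sketch, assuming a Windows command prompt in the `python/llm/portable-zip` folder; the extraction folder name `bigdl-llm-ui` is just an example, not part of the commit):

:: Build the Web-UI package, unpack it, and start the chat UI.
call setup.bat --ui
powershell -Command "Expand-Archive -Path .\bigdl-llm-ui.zip -DestinationPath .\bigdl-llm-ui"
cd bigdl-llm-ui
call chat-ui.bat
:: Then open 127.0.0.1:7860 in a browser, as the script's final message instructs.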