diff --git a/python/llm/portable-zip/chat-ui.bat b/python/llm/portable-zip/chat-ui.bat
index cb43dc66..2a9cf7cc 100644
--- a/python/llm/portable-zip/chat-ui.bat
+++ b/python/llm/portable-zip/chat-ui.bat
@@ -1,7 +1,7 @@
 @echo off
 set /p modelpath="Please enter the model path: "
 
-powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.controller > zip_controller.log 2>&1 }' -NoNewWindow"
+powershell -Command "& { $env:no_proxy='localhost,127.0.0.1'; Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.controller > zip_controller.log 2>&1 }' -NoNewWindow }"
 timeout /t 1 /nobreak >nul 2>&1
 :loop1
 powershell -Command "$output = Get-Content zip_controller.log; if($null -eq $output -or !($output | Select-String -Pattern 'Uvicorn running on')) { exit 1 } else { exit 0 }"
@@ -11,7 +11,7 @@ if errorlevel 1 (
 )
 echo [1/3] Controller started successfully
 
-powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m ipex_llm.serving.model_worker --model-path %modelpath% --device cpu > zip_model_worker.log 2>&1 }' -NoNewWindow"
+powershell -Command "& { $env:no_proxy='localhost,127.0.0.1'; Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m ipex_llm.serving.fastchat.ipex_llm_worker --model-path %modelpath% --device cpu --low-bit "sym_int4" --trust-remote-code > zip_model_worker.log 2>&1 }' -NoNewWindow }"
 timeout /t 1 /nobreak >nul 2>&1
 :loop2
 powershell -Command "$output = Get-Content zip_model_worker.log; if($null -eq $output -or !($output | Select-String -Pattern 'Uvicorn running on')) { exit 1 } else { exit 0 }"
@@ -21,7 +21,7 @@ if errorlevel 1 (
 )
 echo [2/3] Model worker started successfully
 
-powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.gradio_web_server > zip_web_server.log 2>&1 }' -NoNewWindow"
+powershell -Command "& { $env:no_proxy='localhost,127.0.0.1'; Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.gradio_web_server > zip_web_server.log 2>&1 }' -NoNewWindow }"
 timeout /t 1 /nobreak >nul 2>&1
 :loop3
 powershell -Command "$output = Get-Content zip_web_server.log; if($null -eq $output -or !($output | Select-String -Pattern 'Running on local URL')) { exit 1 } else { exit 0 }"
diff --git a/python/llm/portable-zip/chat.py b/python/llm/portable-zip/chat.py
index 439bdd31..cf743a9a 100644
--- a/python/llm/portable-zip/chat.py
+++ b/python/llm/portable-zip/chat.py
@@ -70,7 +70,7 @@ def get_stop_words_ids(chat_format, tokenizer):
 
 @torch.no_grad()
 def greedy_generate(model, tokenizer, input_ids, past_key_values, max_gen_len, stop_words=[]):
-    print(Fore.BLUE+"BigDL-LLM: "+Fore.RESET, end="")
+    print(Fore.BLUE+"IPEX-LLM: "+Fore.RESET, end="")
     outputs = model(
         input_ids=input_ids,
         past_key_values=past_key_values,
@@ -151,7 +151,7 @@ def chatglm3_stream_chat(model, tokenizer):
         # let's stop the conversation when user input "stop"
         if user_input == "stop":
             break
-        print(Fore.BLUE+"BigDL-LLM: "+Fore.RESET, end="")
+        print(Fore.BLUE+"IPEX-LLM: "+Fore.RESET, end="")
         # https://github.com/THUDM/ChatGLM3/blob/main/PROMPT_en.md
         prompt = f"""
 <|system|>
diff --git a/python/llm/portable-zip/setup.bat b/python/llm/portable-zip/setup.bat
index b509a2b2..7e5040b8 100644
--- a/python/llm/portable-zip/setup.bat
+++ b/python/llm/portable-zip/setup.bat
@@ -17,16 +17,16 @@ powershell -Command "(gc python39._pth) -replace '%search%', '%replace%' | Out-F
 cd ..
 
 :: install pip packages
-%python-embed% -m pip install --pre --upgrade bigdl-llm[all]
+%python-embed% -m pip install --pre --upgrade ipex-llm[all]
 %python-embed% -m pip install transformers_stream_generator tiktoken einops colorama
 
 if "%1"=="--ui" (
-    %python-embed% -m pip install --pre --upgrade bigdl-llm[serving]
+    %python-embed% -m pip install --pre --upgrade ipex-llm[serving]
 )
 
 :: compress the python and scripts
 if "%1"=="--ui" (
-    powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat-ui.bat', '.\README.md' -DestinationPath .\bigdl-llm-ui.zip"
+    powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat-ui.bat', '.\README.md' -DestinationPath .\ipex-llm-ui.zip"
 ) else (
-    powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat.bat', '.\chat.py', '.\README.md' -DestinationPath .\bigdl-llm.zip"
+    powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat.bat', '.\chat.py', '.\README.md' -DestinationPath .\ipex-llm.zip"
 )
\ No newline at end of file
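
For context, each :loopN block in chat-ui.bat above polls a log file until a readiness marker ("Uvicorn running on" or "Running on local URL") appears before moving to the next service. A minimal Python sketch of that same polling pattern, assuming the log file names used in the patch; the function name and timeout are illustrative, not part of this change:

    import re
    import time
    from pathlib import Path

    def wait_for_marker(log_path, pattern, timeout_s=60.0, poll_s=1.0):
        # Re-read the log until the marker shows up, as the :loopN batch blocks do.
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            log = Path(log_path)
            if log.exists() and re.search(pattern, log.read_text(errors="ignore")):
                return True
            time.sleep(poll_s)
        return False

    # e.g. the controller is considered ready once Uvicorn reports it is listening
    if wait_for_marker("zip_controller.log", r"Uvicorn running on"):
        print("[1/3] Controller started successfully")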