Migrate portable zip to ipex-llm (#10617)
* change portable zip prompt to ipex-llm
* fix chat with ui
* add no proxy
parent 9d8ba64c0d
commit 56dfcb2ade
3 changed files with 9 additions and 9 deletions
@@ -1,7 +1,7 @@
 @echo off
 set /p modelpath="Please enter the model path: "

-powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.controller > zip_controller.log 2>&1 }' -NoNewWindow"
+powershell -Command "& { $env:no_proxy='localhost,127.0.0.1'; Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.controller > zip_controller.log 2>&1 }' -NoNewWindow }"
 timeout /t 1 /nobreak >nul 2>&1
 :loop1
 powershell -Command "$output = Get-Content zip_controller.log; if($null -eq $output -or !($output | Select-String -Pattern 'Uvicorn running on')) { exit 1 } else { exit 0 }"
@@ -11,7 +11,7 @@ if errorlevel 1 (
 )
 echo [1/3] Controller started successfully

-powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m ipex_llm.serving.model_worker --model-path %modelpath% --device cpu > zip_model_worker.log 2>&1 }' -NoNewWindow"
+powershell -Command "& { $env:no_proxy='localhost,127.0.0.1'; Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m ipex_llm.serving.fastchat.ipex_llm_worker --model-path %modelpath% --device cpu --low-bit "sym_int4" --trust-remote-code > zip_model_worker.log 2>&1 }' -NoNewWindow }"
 timeout /t 1 /nobreak >nul 2>&1
 :loop2
 powershell -Command "$output = Get-Content zip_model_worker.log; if($null -eq $output -or !($output | Select-String -Pattern 'Uvicorn running on')) { exit 1 } else { exit 0 }"
@@ -21,7 +21,7 @@ if errorlevel 1 (
 )
 echo [2/3] Model worker started successfully

-powershell -Command "Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.gradio_web_server > zip_web_server.log 2>&1 }' -NoNewWindow"
+powershell -Command "& { $env:no_proxy='localhost,127.0.0.1'; Start-Process -FilePath PowerShell -ArgumentList '-Command', '& { .\python-embed\python.exe -m fastchat.serve.gradio_web_server > zip_web_server.log 2>&1 }' -NoNewWindow }"
 timeout /t 1 /nobreak >nul 2>&1
 :loop3
 powershell -Command "$output = Get-Content zip_web_server.log; if($null -eq $output -or !($output | Select-String -Pattern 'Running on local URL')) { exit 1 } else { exit 0 }"
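Two things change in this UI launcher script (evidently the chat-ui.bat that setup.bat packages below). Each Start-Process call is now wrapped in a PowerShell scriptblock that sets $env:no_proxy='localhost,127.0.0.1' first, so the controller, model worker, and Gradio web server can reach each other on localhost even behind a configured proxy; and the worker launch moves from ipex_llm.serving.model_worker to the FastChat-specific ipex_llm.serving.fastchat.ipex_llm_worker with sym_int4 low-bit loading and --trust-remote-code. A minimal Python sketch of the same launch-then-poll pattern follows; the module and log names come from the diff, while the helper itself is illustrative and not part of the scripts:

import os
import subprocess
import time

# Child processes inherit this environment, so localhost traffic bypasses
# any configured proxy: the same effect as $env:no_proxy in the diff.
env = dict(os.environ, no_proxy="localhost,127.0.0.1")

# Illustrative stand-in for the controller launch in the first hunk.
log_path = "zip_controller.log"
with open(log_path, "wb") as log:
    subprocess.Popen(
        ["python", "-m", "fastchat.serve.controller"],
        env=env, stdout=log, stderr=subprocess.STDOUT,
    )

def wait_for(path: str, pattern: str, interval: float = 1.0) -> None:
    # Mirrors the :loop1/:loop2/:loop3 batch loops: re-read the log file
    # until the server prints its startup banner.
    while True:
        try:
            with open(path, errors="ignore") as f:
                if pattern in f.read():
                    return
        except FileNotFoundError:
            pass
        time.sleep(interval)

wait_for(log_path, "Uvicorn running on")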
@@ -70,7 +70,7 @@ def get_stop_words_ids(chat_format, tokenizer):

 @torch.no_grad()
 def greedy_generate(model, tokenizer, input_ids, past_key_values, max_gen_len, stop_words=[]):
-    print(Fore.BLUE+"BigDL-LLM: "+Fore.RESET, end="")
+    print(Fore.BLUE+"IPEX-LLM: "+Fore.RESET, end="")
     outputs = model(
         input_ids=input_ids,
         past_key_values=past_key_values,
@@ -151,7 +151,7 @@ def chatglm3_stream_chat(model, tokenizer):
         # let's stop the conversation when user input "stop"
         if user_input == "stop":
             break
-        print(Fore.BLUE+"BigDL-LLM: "+Fore.RESET, end="")
+        print(Fore.BLUE+"IPEX-LLM: "+Fore.RESET, end="")
         # https://github.com/THUDM/ChatGLM3/blob/main/PROMPT_en.md
         prompt = f"""
<|system|>
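The chat.py edits rebrand the console prompt from BigDL-LLM to IPEX-LLM. The surrounding context shows the ChatGLM3 chat format referenced by the PROMPT_en.md link, which frames each turn with <|system|>, <|user|>, and <|assistant|> tags. A hedged sketch of assembling such a prompt, where the helper name and system text are hypothetical and only the tag layout follows the linked format:

def build_chatglm3_prompt(system_text: str, user_input: str) -> str:
    # Hypothetical helper; the role tags follow the ChatGLM3 format
    # documented at https://github.com/THUDM/ChatGLM3/blob/main/PROMPT_en.md
    return (
        "<|system|>\n"
        f"{system_text}\n"
        "<|user|>\n"
        f"{user_input}\n"
        "<|assistant|>"
    )

prompt = build_chatglm3_prompt(
    "You are an AI assistant.",  # placeholder system message
    "What can IPEX-LLM do?",
)
print(prompt)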
@@ -17,16 +17,16 @@ powershell -Command "(gc python39._pth) -replace '%search%', '%replace%' | Out-F
 cd ..

 :: install pip packages
-%python-embed% -m pip install --pre --upgrade bigdl-llm[all]
+%python-embed% -m pip install --pre --upgrade ipex-llm[all]
 %python-embed% -m pip install transformers_stream_generator tiktoken einops colorama

 if "%1"=="--ui" (
-%python-embed% -m pip install --pre --upgrade bigdl-llm[serving]
+%python-embed% -m pip install --pre --upgrade ipex-llm[serving]
 )

 :: compress the python and scripts
 if "%1"=="--ui" (
-powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat-ui.bat', '.\README.md' -DestinationPath .\bigdl-llm-ui.zip"
+powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat-ui.bat', '.\README.md' -DestinationPath .\ipex-llm-ui.zip"
 ) else (
-powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat.bat', '.\chat.py', '.\README.md' -DestinationPath .\bigdl-llm.zip"
+powershell -Command "Compress-Archive -Path '.\python-embed', '.\chat.bat', '.\chat.py', '.\README.md' -DestinationPath .\ipex-llm.zip"
 )
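For orientation, the setup script's two Compress-Archive branches now produce ipex-llm-ui.zip (python-embed plus chat-ui.bat and README.md) when invoked with --ui, and ipex-llm.zip (python-embed plus chat.bat, chat.py, and README.md) otherwise. A rough Python equivalent of that packaging step, using only the file names visible in the diff:

import sys
import zipfile
from pathlib import Path

# Mirror the two branches above: --ui packages the web-UI launcher,
# the default packages the console chat script.
ui = len(sys.argv) > 1 and sys.argv[1] == "--ui"
name = "ipex-llm-ui.zip" if ui else "ipex-llm.zip"
files = ["chat-ui.bat"] if ui else ["chat.bat", "chat.py"]
files.append("README.md")

with zipfile.ZipFile(name, "w", zipfile.ZIP_DEFLATED) as zf:
    for f in files:
        zf.write(f)
    # Recurse into the embedded Python directory, as Compress-Archive
    # does for the '.\python-embed' path.
    for p in Path("python-embed").rglob("*"):
        zf.write(p)

print(f"created {name}")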