[LLM] llm gptneox chat (#8527)

* linux

* support win

* merge upstream & support vnni lib in chat
Yina Chen 2023-07-18 11:17:17 +08:00 committed by GitHub
parent 1ebc43b151
commit 4582b6939d
2 changed files with 10 additions and 4 deletions

File 1 of 2: llm-chat (Bash)

@@ -47,6 +47,9 @@ function llama {
 }
 function gptneox {
+  PROMPT="A chat between a curious human <human> and an artificial intelligence assistant <bot>.\
+The assistant gives helpful, detailed, and polite answers to the human's questions."
+  EXTRA_ARGS+=('--instruct' '-p' '"$PROMPT"')
   command="$lib_dir/main-gptneox_$avx_flag -t $threads -n $n_predict ${filteredArguments[*]} ${EXTRA_ARGS[*]}"
   echo "$command"
   eval "$command"
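
Note the quoting in EXTRA_ARGS+=('--instruct' '-p' '"$PROMPT"'): the single quotes keep $PROMPT unexpanded when the array is built, so expansion happens only when eval runs the assembled string, and the inner double quotes keep the multi-sentence prompt as a single argument. As a sketch, assuming AVX2 was detected and with placeholder values for -t and -n, the evaluated command would look roughly like:

    # Illustrative expansion only; $lib_dir, -t, and -n depend on the local install.
    $lib_dir/main-gptneox_avx2 -t 8 -n 512 --instruct \
        -p "A chat between a curious human <human> and an artificial intelligence assistant <bot>. The assistant gives helpful, detailed, and polite answers to the human's questions."
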
@@ -86,7 +89,6 @@ echo "AVX Flags: $avx_flag"
 if [[ "$model_family" == "llama" ]]; then
   llama
 elif [[ "$model_family" == "gptneox" ]]; then
-  # TODO
   gptneox
 else
   echo "llm-chat does not support model_family $model_family for now."
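
With the TODO gone, gptneox is now dispatched like llama. A hypothetical session launch is shown below; the flag names for model family and model path are assumptions about the surrounding CLI, not something this diff shows:

    # Hypothetical invocation; -x and -m are assumed flag names.
    llm-chat -x gptneox -m ./gptneox-model-q4_0.bin -t 8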

File 2 of 2: llm-chat (PowerShell)

@@ -2,6 +2,7 @@ $llm_dir = (Split-Path -Parent (python -c "import bigdl.llm;print(bigdl.llm.__fi
 $lib_dir = Join-Path $llm_dir "libs"
 $prompt_dir = Join-Path $llm_dir "cli/prompts"
+$vnni_enable = ((python -c "from bigdl.llm.utils.isa_checker import check_avx_vnni;print(check_avx_vnni())").ToLower() -eq "true")
 $model_family = ""
 $threads = 8
 # Number of tokens to predict (made it larger than default because we want a long interaction)
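
The new $vnni_enable line shells out to Python: check_avx_vnni() prints True or False, and the script lowercases that output before comparing it to "true" to get a PowerShell Boolean. The same probe can be run standalone, using the import path taken from this diff:

    # Prints True on CPUs with AVX-VNNI support, False otherwise.
    python -c "from bigdl.llm.utils.isa_checker import check_avx_vnni; print(check_avx_vnni())"
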
@@ -23,16 +24,19 @@ function Display-Help
 function llama
 {
+    $exec_file = if ($vnni_enable) { "main-llama_vnni.exe" } else { "main-llama.exe" }
     $prompt_file = Join-Path $prompt_dir "chat-with-llm.txt"
-    $command = "$lib_dir/main-llama.exe -t $threads -n $n_predict -f $prompt_file -i --color --reverse-prompt 'USER:' --in-prefix ' ' $filteredArguments"
+    $command = "$lib_dir/$exec_file -t $threads -n $n_predict -f $prompt_file -i --color --reverse-prompt 'USER:' --in-prefix ' ' $filteredArguments"
     Write-Host "$command"
     Invoke-Expression $command
 }
 function gptneox
 {
-    # TODO
-    $command = "$lib_dir/main-gptneox.exe -t $threads -n $n_predict $filteredArguments"
+    $exec_file = if ($vnni_enable) { "main-gptneox_vnni.exe" } else { "main-gptneox.exe" }
+    $prompt = "A chat between a curious human and an artificial intelligence assistant.`
+The assistant gives helpful, detailed, and polite answers."
+    $command = "$lib_dir/$exec_file -t $threads -n $n_predict --color --instruct -p '$prompt' $filteredArguments"
     Write-Host "$command"
     Invoke-Expression $command
 }
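
For comparison with the bash side, here is a sketch of the string handed to Invoke-Expression on a VNNI-capable machine; -t and -n carry placeholder values, and the prompt is collapsed to one line for readability:

    # Sketch only; $lib_dir and the numeric values depend on the local setup.
    $lib_dir/main-gptneox_vnni.exe -t 8 -n 512 --color --instruct -p 'A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers.'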