refactor(ai): use switch-case
This commit is contained in:
parent
2883e00cdf
commit
ac14e6eed7
1 changed file with 66 additions and 47 deletions
113
ai.sh
113
ai.sh
|
|
@ -9,7 +9,7 @@ model=$helper
|
||||||
modelfile=$helper_modelfile
|
modelfile=$helper_modelfile
|
||||||
|
|
||||||
# Initialize variables
|
# Initialize variables
|
||||||
other_args=${@:2}
|
command=${@:2}
|
||||||
typora_flag=false
|
typora_flag=false
|
||||||
|
|
||||||
# parse string args (when used as a function and passed "$@")
|
# parse string args (when used as a function and passed "$@")
|
||||||
|
|
@ -27,59 +27,78 @@ while [[ $# -gt 0 ]]; do
|
||||||
esac
|
esac
|
||||||
done
|
done
|
||||||
|
|
||||||
if ! [ "$other_args" = "" ]; then
|
function main() {
|
||||||
if [ "$other_args" = "open-webui" ]; then
|
case $command in
|
||||||
|
"open-webui")
|
||||||
. $HOME/open-webui/.venv/bin/activate
|
. $HOME/open-webui/.venv/bin/activate
|
||||||
open-webui serve
|
open-webui serve
|
||||||
python --version
|
python --version
|
||||||
deactivate
|
deactivate
|
||||||
elif [ "$other_args" = "remote" ]; then
|
;;
|
||||||
|
"remote")
|
||||||
export OLLAMA_HOST=192.168.0.6
|
export OLLAMA_HOST=192.168.0.6
|
||||||
elif [ "$other_args" = "list" ]; then
|
;;
|
||||||
OLLAMA_HOST=$host ollama list
|
"list")
|
||||||
elif [ "$other_args" = "ps" ]; then
|
OLLAMA_HOST=$host ollama list
|
||||||
OLLAMA_HOST=$host ollama ps
|
;;
|
||||||
elif [ "$other_args" = "rm" ]; then
|
"ps")
|
||||||
OLLAMA_HOST=$host ollama rm "$3"
|
OLLAMA_HOST=$host ollama ps
|
||||||
elif [ "$other_args" = "init" ]; then
|
;;
|
||||||
OLLAMA_HOST=$host ollama create $model -f $modelfile
|
"rm")
|
||||||
elif [ "$other_args" = "wake" ]; then
|
OLLAMA_HOST=$host ollama rm "$3"
|
||||||
. $HOME/llm_env/bin/activate
|
;;
|
||||||
|
"init")
|
||||||
|
OLLAMA_HOST=$host ollama create $model -f $modelfile
|
||||||
|
;;
|
||||||
|
"wake")
|
||||||
|
. $HOME/llm_env/bin/activate
|
||||||
|
|
||||||
unset OCL_ICD_VENDORS
|
unset OCL_ICD_VENDORS
|
||||||
export OLLAMA_NUM_GPU=999
|
export OLLAMA_NUM_GPU=999
|
||||||
export no_proxy=localhost,127.0.0.1
|
export no_proxy=localhost,127.0.0.1
|
||||||
export ZES_ENABLE_SYSMAN=1
|
export ZES_ENABLE_SYSMAN=1
|
||||||
source $HOME/intel/oneapi/setvars.sh
|
source $HOME/intel/oneapi/setvars.sh
|
||||||
export SYCL_CACHE_PERSISTENT=1
|
export SYCL_CACHE_PERSISTENT=1
|
||||||
export SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=0
|
export SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=0
|
||||||
|
|
||||||
$HOME/llama-cpp/ollama serve
|
$HOME/llama-cpp/ollama serve
|
||||||
python --version
|
python --version
|
||||||
deactivate
|
deactivate
|
||||||
|
|
||||||
echo $ZES_ENABLE_SYSMAN
|
echo $ZES_ENABLE_SYSMAN
|
||||||
echo $SYCL_CACHE_PERSISTENT
|
echo $SYCL_CACHE_PERSISTENT
|
||||||
elif [ "$other_args" = "update" ]; then
|
;;
|
||||||
curl -fsSL https://ollama.com/install.sh | sh
|
"update")
|
||||||
echo "See instructions on how to expose ollama in the local network: https://git.ayo.run/ayo/scripts/src/branch/main/expose-ollama.md"
|
curl -fsSL https://ollama.com/install.sh | sh
|
||||||
elif [ "$other_args" = "sleep" ]; then
|
echo "See instructions on how to expose ollama in the local network: https://git.ayo.run/ayo/scripts/src/branch/main/expose-ollama.md"
|
||||||
OLLAMA_HOST=$host ollama stop $model
|
;;
|
||||||
else
|
"sleep")
|
||||||
# If -t flag is set, use typora to display output
|
OLLAMA_HOST=$host ollama stop $model
|
||||||
if [ "$typora_flag" = true ]; then
|
;;
|
||||||
tempfile="$(mktemp)"
|
"")
|
||||||
OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
|
OLLAMA_HOST=$host ollama run $model --hidethinking
|
||||||
typora $tempfile > /dev/null 2>/dev/null &
|
;;
|
||||||
else
|
*)
|
||||||
# If no -t flag, just run the command normally
|
# If -t flag is set, use typora to display output
|
||||||
OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
|
if [ "$typora_flag" = true ]; then
|
||||||
fi
|
tempfile="$(mktemp)"
|
||||||
fi
|
OLLAMA_HOST=$host ollama run $model "$command" --hidethinking > $tempfile
|
||||||
|
typora $tempfile > /dev/null 2>/dev/null &
|
||||||
|
else
|
||||||
|
# If no -t flag, just run the command normally
|
||||||
|
OLLAMA_HOST=$host ollama run $model "$command" --hidethinking
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
else
|
# release memory
|
||||||
OLLAMA_HOST=$host ollama run $model --hidethinking
|
OLLAMA_HOST=$host ollama stop $model
|
||||||
fi
|
}
|
||||||
|
|
||||||
# release memory
|
start_time=$(date +%s%N)
|
||||||
OLLAMA_HOST=$host ollama stop $model
|
main $@
|
||||||
|
end_time=$(date +%s%N)
|
||||||
|
duration=$((end_time - start_time))
|
||||||
|
duration_ms=$(echo "scale=3; $duration / 1000000" | bc)
|
||||||
|
duration_s=$(echo "scale=3; $duration_ms / 1000" | bc)
|
||||||
|
echo "Took $duration_s s"
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue