refactor(ai): use switch-case

This commit is contained in:
ayo 2026-03-07 11:25:32 +01:00
parent 2883e00cdf
commit ac14e6eed7

ai.sh: 57 lines changed — View file

@@ -9,7 +9,7 @@ model=$helper
 modelfile=$helper_modelfile
 # Initialize variables
-other_args=${@:2}
+command=${@:2}
 typora_flag=false
 # parse string args (when used as a function and passed "$@")
@@ -27,23 +27,30 @@ while [[ $# -gt 0 ]]; do
 esac
 done
-if ! [ "$other_args" = "" ]; then
-if [ "$other_args" = "open-webui" ]; then
+function main() {
+case $command in
+"open-webui")
 . $HOME/open-webui/.venv/bin/activate
 open-webui serve
 python --version
 deactivate
-elif [ "$other_args" = "remote" ]; then
+;;
+"remote")
 export OLLAMA_HOST=192.168.0.6
-elif [ "$other_args" = "list" ]; then
+;;
+"list")
 OLLAMA_HOST=$host ollama list
-elif [ "$other_args" = "ps" ]; then
+;;
+"ps")
 OLLAMA_HOST=$host ollama ps
-elif [ "$other_args" = "rm" ]; then
+;;
+"rm")
 OLLAMA_HOST=$host ollama rm "$3"
-elif [ "$other_args" = "init" ]; then
+;;
+"init")
 OLLAMA_HOST=$host ollama create $model -f $modelfile
-elif [ "$other_args" = "wake" ]; then
+;;
+"wake")
 . $HOME/llm_env/bin/activate
 unset OCL_ICD_VENDORS
@@ -60,26 +67,38 @@ if ! [ "$other_args" = "" ]; then
 echo $ZES_ENABLE_SYSMAN
 echo $SYCL_CACHE_PERSISTENT
-elif [ "$other_args" = "update" ]; then
+;;
+"update")
 curl -fsSL https://ollama.com/install.sh | sh
 echo "See instructions on how to expose ollama in the local network: https://git.ayo.run/ayo/scripts/src/branch/main/expose-ollama.md"
-elif [ "$other_args" = "sleep" ]; then
+;;
+"sleep")
 OLLAMA_HOST=$host ollama stop $model
-else
+;;
+"")
+OLLAMA_HOST=$host ollama run $model --hidethinking
+;;
+*)
 # If -t flag is set, use typora to display output
 if [ "$typora_flag" = true ]; then
 tempfile="$(mktemp)"
-OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
+OLLAMA_HOST=$host ollama run $model "$command" --hidethinking > $tempfile
 typora $tempfile > /dev/null 2>/dev/null &
 else
 # If no -t flag, just run the command normally
-OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
+OLLAMA_HOST=$host ollama run $model "$command" --hidethinking
 fi
-fi
-else
-OLLAMA_HOST=$host ollama run $model --hidethinking
 fi
+;;
+esac
 # release memory
 OLLAMA_HOST=$host ollama stop $model
+}
+start_time=$(date +%s%N)
+main $@
+end_time=$(date +%s%N)
+duration=$((end_time - start_time))
+duration_ms=$(echo "scale=3; $duration / 1000000" | bc)
+duration_s=$(echo "scale=3; $duration_ms / 1000" | bc)
+echo "Took $duration_s s"