From b034715795e54df712bdb993076972f8f42eb2be Mon Sep 17 00:00:00 2001
From: Ayo
Date: Wed, 17 Sep 2025 10:59:43 +0200
Subject: [PATCH] feat(ai): various changes

- add ai-search.sh, a wrapper for the $search model modelled on
  ai-brainstorm.sh and ai-coder.sh
- run typora in the background and discard its output in all wrappers
- ai.sh: unset OCL_ICD_VENDORS and set
  SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=0 in the wake branch;
  pass --think=false to the prompt-less "ollama run"
---
 ai-brainstorm.sh |  2 +-
 ai-coder.sh      |  2 +-
 ai-search.sh     | 60 ++++++++++++++++++++++++++++++++++++++++++++++++
 ai.sh            |  6 +++--
 4 files changed, 66 insertions(+), 4 deletions(-)
 create mode 100755 ai-search.sh

diff --git a/ai-brainstorm.sh b/ai-brainstorm.sh
index 54dee80..057f8c7 100755
--- a/ai-brainstorm.sh
+++ b/ai-brainstorm.sh
@@ -49,7 +49,7 @@ if ! [ "$other_args" = "" ]; then
         if [ "$typora_flag" = true ]; then
             tempfile="$(mktemp)"
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
-            typora $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
         else
             # If no -t flag, just run the command normally
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
diff --git a/ai-coder.sh b/ai-coder.sh
index 988e46e..96db686 100755
--- a/ai-coder.sh
+++ b/ai-coder.sh
@@ -49,7 +49,7 @@ if ! [ "$other_args" = "" ]; then
         if [ "$typora_flag" = true ]; then
             tempfile="$(mktemp)"
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
-            typora $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
         else
             # If no -t flag, just run the command normally
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
diff --git a/ai-search.sh b/ai-search.sh
new file mode 100755
index 0000000..819af71
--- /dev/null
+++ b/ai-search.sh
@@ -0,0 +1,60 @@
+# Load config
+. ${HOME}/ayo.conf
+
+model=$search
+modelfile=$search_modelfile
+
+host=$ollama_remote_host
+
+# Initialize variables
+typora_flag=false
+other_args=""
+
+# Process arguments to handle -t flag and collect other args
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        -t)
+            typora_flag=true
+            shift
+            ;;
+        *)
+            other_args="$other_args $1"
+            shift
+            ;;
+    esac
+done
+
+# Set other_args to the first argument if it exists, otherwise empty string
+if [[ -n "$other_args" ]]; then
+    # Remove leading space
+    other_args="${other_args# }"
+
+    IFS=' ' read -ra args_array <<< "$other_args"
+    if [[ ${#args_array[@]} -gt 1 ]]; then
+        # Remove first element and rejoin remaining elements
+        other_args="${args_array[*]:1}"
+    else
+        # If there's only one argument, set other_args to empty string
+        other_args=""
+    fi
+fi
+
+if ! [ "$other_args" = "" ]; then
+    if [ "$other_args" = "sleep" ]; then
+        OLLAMA_HOST=$host ollama stop $model
+    elif [ "$other_args" = "init" ]; then
+        OLLAMA_HOST=$host ollama create "$model" -f "$modelfile"
+    else
+        # If -t flag is set, use typora to display output
+        if [ "$typora_flag" = true ]; then
+            tempfile="$(mktemp)"
+            OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
+        else
+            # If no -t flag, just run the command normally
+            OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
+        fi
+    fi
+else
+    OLLAMA_HOST=$host ollama run $model --hidethinking
+fi
diff --git a/ai.sh b/ai.sh
index 5fc93d9..153487f 100755
--- a/ai.sh
+++ b/ai.sh
@@ -57,11 +57,13 @@ if ! [ "$other_args" = "" ]; then
     elif [ "$other_args" = "wake" ]; then
         . $HOME/llm_env/bin/activate
 
+        unset OCL_ICD_VENDORS
         export OLLAMA_NUM_GPU=999
         export no_proxy=localhost,127.0.0.1
         export ZES_ENABLE_SYSMAN=1
         source $HOME/intel/oneapi/setvars.sh
         export SYCL_CACHE_PERSISTENT=1
+        export SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=0
 
         $HOME/llama-cpp/ollama serve
         python --version
@@ -76,7 +78,7 @@ if ! [ "$other_args" = "" ]; then
[ "$other_args" = "" ]; then if [ "$typora_flag" = true ]; then tempfile="$(mktemp)" OLLAMA_HOST=$host ollama run $model "$other_args" > $tempfile - typora $tempfile + typora $tempfile > /dev/null 2>/dev/null & else # If no -t flag, just run the command normally OLLAMA_HOST=$host ollama run $model "$other_args" @@ -84,5 +86,5 @@ if ! [ "$other_args" = "" ]; then fi else - OLLAMA_HOST=$host ollama run $model + OLLAMA_HOST=$host ollama run $model --think=false fi