feat(ai): various changes
parent 7f30ef91b2
commit b034715795

4 changed files with 66 additions and 4 deletions
@@ -49,7 +49,7 @@ if ! [ "$other_args" = "" ]; then
         if [ "$typora_flag" = true ]; then
             tempfile="$(mktemp)"
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
-            typora $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
         else
             # If no -t flag, just run the command normally
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking

@@ -49,7 +49,7 @@ if ! [ "$other_args" = "" ]; then
         if [ "$typora_flag" = true ]; then
             tempfile="$(mktemp)"
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
-            typora $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
         else
             # If no -t flag, just run the command normally
             OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
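Both hunks above make the same one-line change in two of the changed files: typora is no longer left in the foreground; it is started in the background with stdout and stderr discarded, so the script returns as soon as the viewer is launched. A minimal stand-alone sketch of that pattern, assuming typora (or any GUI Markdown viewer) is on the PATH; the file name here is a placeholder:

    # illustrative only: open a generated Markdown file without blocking the script
    outfile="$(mktemp)"
    printf '# hello\n' > "$outfile"
    typora "$outfile" > /dev/null 2>&1 &    # detach the viewer, silence its output

`2>&1` after `> /dev/null` is the conventional spelling; the committed `> /dev/null 2>/dev/null &` routes the two streams to /dev/null separately with the same effect.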
60 ai-search.sh Executable file
@@ -0,0 +1,60 @@
+# Load config
+. ${HOME}/ayo.conf
+
+model=$search
+modelfile=$search_modelfile
+
+host=$ollama_remote_host
+
+# Initialize variables
+typora_flag=false
+other_args=""
+
+# Process arguments to handle -t flag and collect other args
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        -t)
+            typora_flag=true
+            shift
+            ;;
+        *)
+            other_args="$other_args $1"
+            shift
+            ;;
+    esac
+done
+
+# Set other_args to the first argument if it exists, otherwise empty string
+if [[ -n "$other_args" ]]; then
+    # Remove leading space
+    other_args="${other_args# }"
+
+    IFS=' ' read -ra args_array <<< "$other_args"
+    if [[ ${#args_array[@]} -gt 1 ]]; then
+        # Remove first element and rejoin remaining elements
+        other_args="${args_array[*]:1}"
+    else
+        # If there's only one argument, set other_args to empty string
+        other_args=""
+    fi
+fi
+
+if ! [ "$other_args" = "" ]; then
+    if [ "$other_args" = "sleep" ]; then
+        OLLAMA_HOST=$host ollama stop $model
+    elif [ "$other_args" = "init" ]; then
+        OLLAMA_HOST=$host ollama create "$model" -f "$modelfile"
+    else
+        # If -t flag is set, use typora to display output
+        if [ "$typora_flag" = true ]; then
+            tempfile="$(mktemp)"
+            OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking > $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
+        else
+            # If no -t flag, just run the command normally
+            OLLAMA_HOST=$host ollama run $model "$other_args" --hidethinking
+        fi
+    fi
+else
+    OLLAMA_HOST=$host ollama run $model --hidethinking
+fi
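The new ai-search.sh sources ${HOME}/ayo.conf and expects it to define search, search_modelfile and ollama_remote_host. A sketch of such a config file; only the three variable names come from the script, every value below is a placeholder:

    # ~/ayo.conf -- illustrative values
    search="qwen3:8b"                              # model name used by ai-search.sh
    search_modelfile="$HOME/ayo/search.Modelfile"  # passed to `ollama create -f` by `init`
    ollama_remote_host="127.0.0.1:11434"           # used as OLLAMA_HOST for every ollama call

Note that the argument handling above always drops the first positional word: a single argument leaves other_args empty and falls through to the interactive `ollama run`, and the `sleep` and `init` keywords are only matched from the second word onward.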
6 ai.sh
@@ -57,11 +57,13 @@ if ! [ "$other_args" = "" ]; then
     elif [ "$other_args" = "wake" ]; then
         . $HOME/llm_env/bin/activate
+
+        unset OCL_ICD_VENDORS
         export OLLAMA_NUM_GPU=999
         export no_proxy=localhost,127.0.0.1
         export ZES_ENABLE_SYSMAN=1
         source $HOME/intel/oneapi/setvars.sh
         export SYCL_CACHE_PERSISTENT=1
         export SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=0
 
         $HOME/llama-cpp/ollama serve
         python --version
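The wake branch activates a Python venv, sets up the Intel oneAPI/SYCL environment, and then runs the ollama binary under $HOME/llama-cpp in the foreground (so the trailing python --version only runs after the server exits). A hedged sketch of checking the freshly started server from a second shell, assuming it listens on the default address; host and port are assumptions, not taken from the diff:

    OLLAMA_HOST=127.0.0.1:11434 ollama list      # models known to the running server
    curl -s http://127.0.0.1:11434/api/tags      # same listing via the REST API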
@@ -76,7 +78,7 @@ if ! [ "$other_args" = "" ]; then
         if [ "$typora_flag" = true ]; then
             tempfile="$(mktemp)"
             OLLAMA_HOST=$host ollama run $model "$other_args" > $tempfile
-            typora $tempfile
+            typora $tempfile > /dev/null 2>/dev/null &
         else
             # If no -t flag, just run the command normally
             OLLAMA_HOST=$host ollama run $model "$other_args"
@@ -84,5 +86,5 @@ if ! [ "$other_args" = "" ]; then
     fi
 
 else
-    OLLAMA_HOST=$host ollama run $model
+    OLLAMA_HOST=$host ollama run $model --think=false
 fi