ipex-llm/python/llm/test/run-llm-inference-tests.sh

#!/bin/bash
# ANALYTICS_ZOO_ROOT must be set by the caller; derive the source and test dirs from it
export ANALYTICS_ZOO_ROOT=${ANALYTICS_ZOO_ROOT}
export LLM_HOME=${ANALYTICS_ZOO_ROOT}/python/llm/src
export LLM_INFERENCE_TEST_DIR=${ANALYTICS_ZOO_ROOT}/python/llm/test/inference

# Load BigDL-Nano performance-related environment settings
source bigdl-nano-init
# Exit immediately if any command fails
set -e
echo "# Start testing inference"
start=$(date "+%s")
# Run all inference tests except the transformers INT4 tests
python -m pytest -s ${LLM_INFERENCE_TEST_DIR} -k "not test_transformers_int4"

# Run the transformers INT4 tests with 24 OMP threads, pinned to CPU cores 0-23
export OMP_NUM_THREADS=24
taskset -c 0-23 python -m pytest -s ${LLM_INFERENCE_TEST_DIR} -k test_transformers_int4
now=$(date "+%s")
time=$((now-start))
echo "Bigdl-llm tests finished"
echo "Time used:$time seconds"