[LLM] Support for new arc ut runner (#10311)

* Support for new arc ut runner

* Comment out unnecessary OMP_NUM_THREADS-related settings for arc UTs
Yuwen Hu 2024-03-04 18:42:02 +08:00 committed by GitHub
parent d45e577d8c
commit 5dbbe1a826
3 changed files with 17 additions and 17 deletions


@@ -216,10 +216,10 @@ jobs:
       matrix:
         pytorch-version: ['2.1', '2.0']
         python-version: ${{ fromJson(needs.setup-python-version.outputs.python-version) }}
-    runs-on: [self-hosted, llm, arc]
+    runs-on: [self-hosted, llm, arc-ut]
     env:
-      OMP_NUM_THREADS: 16
-      THREAD_NUM: 16
+      # OMP_NUM_THREADS: 16
+      # THREAD_NUM: 16
       ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
     steps:
       - name: Set environment variables
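Note: with OMP_NUM_THREADS and THREAD_NUM commented out of the job env, the OpenMP runtime (and PyTorch's intra-op thread pool) falls back to its own default, typically one thread per available core, instead of being pinned to 16. A minimal sketch of the difference, runnable in a shell on the runner (the python one-liner is illustrative only, not part of this workflow):

    # Old behavior: thread count pinned via the environment
    OMP_NUM_THREADS=16 python -c "import torch; print(torch.get_num_threads())"   # prints 16
    # New behavior: no pin; OpenMP/PyTorch choose a default from the core count
    python -c "import torch; print(torch.get_num_threads())"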
@@ -266,9 +266,9 @@ jobs:
         run: |
           # Specific oneapi position on arc ut test machines
           if [[ '${{ matrix.pytorch-version }}' == '2.1' ]]; then
-            source /home/arda/intel/oneapi/setvars.sh
-          elif [[ '${{ matrix.pytorch-version }}' == '2.0' ]]; then
             source /opt/intel/oneapi/setvars.sh
+          elif [[ '${{ matrix.pytorch-version }}' == '2.0' ]]; then
+            source /home/arda/intel/oneapi/setvars.sh
           fi
           bash python/llm/test/run-llm-install-tests.sh
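The two setvars.sh paths are swapped rather than removed: on the new arc-ut machines the oneAPI install used with PyTorch 2.1 apparently lives under /opt/intel while the one for 2.0 lives under /home/arda/intel, the reverse of the old runners. A quick sanity check after sourcing, assuming a standard oneAPI layout (setvars.sh exports ONEAPI_ROOT):

    source /opt/intel/oneapi/setvars.sh
    echo "$ONEAPI_ROOT"   # expected to print /opt/intel/oneapi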
@@ -324,9 +324,9 @@ jobs:
         run: |
           # Specific oneapi position on arc ut test machines
           if [[ '${{ matrix.pytorch-version }}' == '2.1' ]]; then
-            source /home/arda/intel/oneapi/setvars.sh
-          elif [[ '${{ matrix.pytorch-version }}' == '2.0' ]]; then
             source /opt/intel/oneapi/setvars.sh
+          elif [[ '${{ matrix.pytorch-version }}' == '2.0' ]]; then
+            source /home/arda/intel/oneapi/setvars.sh
           fi
           python -m pip install datasets librosa soundfile einops tiktoken transformers_stream_generator
           bash python/llm/test/run-llm-inference-tests-gpu.sh
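The same version-conditional sourcing block is now repeated in three steps. If it grows further, one option would be to factor it into a small helper; this is only a sketch, and source_oneapi is a hypothetical function, not part of this change:

    # Hypothetical helper: source the setvars.sh matching the PyTorch version under test
    source_oneapi() {
      if [[ "$1" == '2.1' ]]; then
        source /opt/intel/oneapi/setvars.sh
      elif [[ "$1" == '2.0' ]]; then
        source /home/arda/intel/oneapi/setvars.sh
      fi
    }
    source_oneapi '${{ matrix.pytorch-version }}'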
@@ -341,8 +341,8 @@ jobs:
           python -m pip install bitsandbytes scipy
           # Specific oneapi position on arc ut test machines
           if [[ '${{ matrix.pytorch-version }}' == '2.1' ]]; then
-            source /home/arda/intel/oneapi/setvars.sh
-          elif [[ '${{ matrix.pytorch-version }}' == '2.0' ]]; then
             source /opt/intel/oneapi/setvars.sh
+          elif [[ '${{ matrix.pytorch-version }}' == '2.0' ]]; then
+            source /home/arda/intel/oneapi/setvars.sh
           fi
           bash python/llm/test/run-llm-example-tests-gpu.sh


@@ -13,10 +13,10 @@ set -e
 echo "# Start testing inference"
 start=$(date "+%s")
-if [ -z "$THREAD_NUM" ]; then
-  THREAD_NUM=2
-fi
-export OMP_NUM_THREADS=$THREAD_NUM
+# if [ -z "$THREAD_NUM" ]; then
+#   THREAD_NUM=2
+# fi
+# export OMP_NUM_THREADS=$THREAD_NUM
 export BIGDL_LLM_XMX_DISABLED=1
 pytest ${LLM_INFERENCE_TEST_DIR}/test_transformers_api_mlp.py -v -s -k "Mistral"
 unset BIGDL_LLM_XMX_DISABLED
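For reference, the commented-out default could be restored more compactly with a parameter-expansion idiom; this sketch is behaviorally equivalent to the removed if-block (it assigns 2 only when THREAD_NUM is unset or empty):

    : "${THREAD_NUM:=2}"
    export OMP_NUM_THREADS="$THREAD_NUM"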


@@ -13,10 +13,10 @@ set -e
 echo "# Start testing inference"
 start=$(date "+%s")
-if [ -z "$THREAD_NUM" ]; then
-  THREAD_NUM=2
-fi
-export OMP_NUM_THREADS=$THREAD_NUM
+# if [ -z "$THREAD_NUM" ]; then
+#   THREAD_NUM=2
+# fi
+# export OMP_NUM_THREADS=$THREAD_NUM
 pytest ${LLM_INFERENCE_TEST_DIR}/test_transformers_api.py -v -s
 export BIGDL_LLM_XMX_DISABLED=1
 pytest ${LLM_INFERENCE_TEST_DIR}/test_transformers_api_final_logits.py -v -s
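As in the previous script, BIGDL_LLM_XMX_DISABLED is exported just before the tests that need it. An equivalent way to scope it to a single invocation, without needing a later unset (a sketch, not part of this change):

    BIGDL_LLM_XMX_DISABLED=1 pytest ${LLM_INFERENCE_TEST_DIR}/test_transformers_api_final_logits.py -v -s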