[LLM] Small update to performance tests (#9106)
* Small updates to LLM performance tests regarding model handling
* Small fix
parent edccfb2ed3
commit 65212451cc
2 changed files with 35 additions and 10 deletions
.github/workflows/llm_performance_tests.yml (vendored), 38 changed lines
@@ -36,6 +36,10 @@ jobs:
    env:
      THREAD_NUM: 24
    steps:
      - name: Set environment variables
        shell: bash
        run: |
          echo "LLAMA2_7B_ORIGIN_PATH=${ORIGIN_DIR}/Llama-2-7b-chat-hf" >> "$GITHUB_ENV"
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
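Note: the new "Set environment variables" step writes to GitHub Actions' $GITHUB_ENV file, so the model path becomes visible to later steps rather than only to the current shell. A minimal local sketch of that mechanism (the temp file and the ORIGIN_DIR value are stand-ins, not part of the workflow):

    # Outside Actions, emulate $GITHUB_ENV with a temp file (the runner supplies the real path).
    export GITHUB_ENV=$(mktemp)
    ORIGIN_DIR=/mnt/disk1/models   # hypothetical local value
    echo "LLAMA2_7B_ORIGIN_PATH=${ORIGIN_DIR}/Llama-2-7b-chat-hf" >> "$GITHUB_ENV"
    # The runner re-exports everything written to this file for subsequent steps:
    cat "$GITHUB_ENV"   # LLAMA2_7B_ORIGIN_PATH=/mnt/disk1/models/Llama-2-7b-chat-hf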
@@ -55,6 +59,14 @@ jobs:
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}

      - name: Download LLMs
        shell: bash
        run: |
          if [ ! -d $LLAMA2_7B_ORIGIN_PATH ]; then
            echo "Directory $LLAMA2_7B_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/llm/Llama-2-7b-chat-hf -P $ORIGIN_DIR
          fi

      - name: Run LLM Performance test
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
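The "Download LLMs" step guards the wget mirror download behind a directory check so models already cached on the runner are reused. The same pattern written as a standalone helper for clarity (the function name is illustrative; LLM_FTP_URL and ORIGIN_DIR are assumed to be set as in the workflow):

    download_if_missing() {
      local model_dir="$1" model_name="$2"
      if [ ! -d "$model_dir" ]; then
        echo "Directory $model_dir not found. Downloading from FTP server..."
        # -r mirrors the remote folder, -nH drops the host directory,
        # --cut-dirs=1 strips the leading "llm/" component, -P sets the target directory.
        wget -r -nH --no-verbose --cut-dirs=1 "$LLM_FTP_URL/llm/$model_name" -P "$ORIGIN_DIR"
      fi
    }
    download_if_missing "$LLAMA2_7B_ORIGIN_PATH" Llama-2-7b-chat-hf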
@@ -76,10 +88,6 @@ jobs:
    env:
      THREAD_NUM: 16
      ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
    steps:
      - name: Set model directories
        shell: bash
        run: |
          echo "ORIGIN_DIR=/mnt/disk1/models" >> "$GITHUB_ENV"
      - name: Set environment variables
        shell: bash
        run: |
@@ -87,6 +95,7 @@ jobs:
          echo "LLAMA2_13B_ORIGIN_PATH=${ORIGIN_DIR}/Llama-2-13b-chat-hf" >> "$GITHUB_ENV"
          echo "CHATGLM2_6B_ORIGIN_PATH=${ORIGIN_DIR}/chatglm2-6b" >> "$GITHUB_ENV"
          echo "WHISPER_MEDIUM_ORIGIN_PATH=${ORIGIN_DIR}/whisper-medium" >> "$GITHUB_ENV"

      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
@@ -112,6 +121,27 @@ jobs:
        run: |
          source /opt/intel/oneapi/setvars.sh
          bash python/llm/test/run-llm-install-tests.sh

      - name: Download LLMs
        shell: bash
        run: |
          if [ ! -d $LLAMA2_7B_ORIGIN_PATH ]; then
            echo "Directory $LLAMA2_7B_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/llm/Llama-2-7b-chat-hf -P $ORIGIN_DIR
          fi
          if [ ! -d $LLAMA2_13B_ORIGIN_PATH ]; then
            echo "Directory $LLAMA2_13B_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/llm/Llama-2-13b-chat-hf -P $ORIGIN_DIR
          fi
          if [ ! -d $CHATGLM2_6B_ORIGIN_PATH ]; then
            echo "Directory $CHATGLM2_6B_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/llm/chatglm2-6b -P $ORIGIN_DIR
          fi
          if [ ! -d $WHISPER_MEDIUM_ORIGIN_PATH ]; then
            echo "Directory $WHISPER_MEDIUM_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/llm/whisper-medium -P $ORIGIN_DIR
          fi

      - name: Test on xpu
        shell: bash
        run: |
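The four per-model guards above all follow the same check-then-fetch pattern. Purely as an illustration (not part of this change), the same logic could be driven from a list of model names, assuming each model lives at $ORIGIN_DIR/<model>:

    for model in Llama-2-7b-chat-hf Llama-2-13b-chat-hf chatglm2-6b whisper-medium; do
      if [ ! -d "$ORIGIN_DIR/$model" ]; then
        echo "Directory $ORIGIN_DIR/$model not found. Downloading from FTP server..."
        wget -r -nH --no-verbose --cut-dirs=1 "$LLM_FTP_URL/llm/$model" -P "$ORIGIN_DIR"
      fi
    done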
@@ -12,11 +12,6 @@ export OMP_NUM_THREADS=$THREAD_NUM
######## LLAMA2
# transformers

if [ ! -d $ORIGINAL_LLAMA2_PATH ]; then
  echo "Directory $ORIGINAL_LLAMA2_PATH not found. Downloading from FTP server..."
  wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/${ORIGINAL_LLAMA2_PATH:2} -P $LLM_DIR
fi

echo ">>> Testing LLAMA2 transformers API"
taskset -c 0-$((THREAD_NUM - 1)) python python/llm/dev/benchmark/pipelines/llama2_test.py --repo-id-or-model-path $ORIGINAL_LLAMA2_PATH
taskset -c 0-$((THREAD_NUM - 1)) python python/llm/dev/benchmark/pipelines/llama2_test.py --repo-id-or-model-path $LLAMA2_7B_ORIGIN_PATH
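The test script pins the benchmark to the first THREAD_NUM cores so the CPU set matches OMP_NUM_THREADS. A small self-contained sketch of that idiom (THREAD_NUM=4 and the python one-liner are stand-ins for the real benchmark command; requires Linux):

    THREAD_NUM=4
    export OMP_NUM_THREADS=$THREAD_NUM
    # Restrict the process to CPUs 0..THREAD_NUM-1 and print the resulting affinity.
    taskset -c 0-$((THREAD_NUM - 1)) python -c "import os; print(sorted(os.sched_getaffinity(0)))"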