Support pr validate perf test (#11486)
* Support triggering performance tests through commits
* Small fix
* Small fix
* Small fixes
parent 4390e7dc49
commit bb6953c19e
2 changed files with 94 additions and 77 deletions
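In short, the jobs in llm_performance_tests.yml are now gated on the workflow_dispatch input checkout-ref: when it is not 'main', each job downloads the freshly built llm binary and installs IPEX-LLM from source; when it is 'main' (or the run is scheduled), it installs the released package from PyPI and writes results to the nightly CSV path. A minimal sketch of the two gating expressions this diff applies throughout (checkout-ref is the workflow_dispatch input these conditions assume):

    # PR-validation path: build and install IPEX-LLM from the specified commit
    if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
    # Nightly path: install the released package from PyPI
    if: ${{ github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main')) }}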
.github/workflows/llm-binary-build.yml (vendored): 40 lines changed
@@ -11,26 +11,26 @@ permissions:
 # Controls when the action will run.
 on:
   # Triggers the workflow on push or pull request events but only for the main branch
-  push:
-    branches: [main]
-    paths:
-      - ".github/workflows/llm-binary-build.yml"
-  pull_request:
-    branches: [main]
-    paths:
-      - ".github/workflows/llm-binary-build.yml"
-  workflow_dispatch:
-    inputs:
-      llmcpp-ref:
-        description: 'Ref of llm.cpp code'
-        default: ''
-        required: false
-        type: string
-      platform:
-        description: 'Platforms to built on'
-        default: '["Windows", "Linux"]'
-        required: false
-        type: string
+  # push:
+  #   branches: [main]
+  #   paths:
+  #     - ".github/workflows/llm-binary-build.yml"
+  # pull_request:
+  #   branches: [main]
+  #   paths:
+  #     - ".github/workflows/llm-binary-build.yml"
+  # workflow_dispatch:
+  #   inputs:
+  #     llmcpp-ref:
+  #       description: 'Ref of llm.cpp code'
+  #       default: ''
+  #       required: false
+  #       type: string
+  #     platform:
+  #       description: 'Platforms to built on'
+  #       default: '["Windows", "Linux"]'
+  #       required: false
+  #       type: string
   workflow_call:
     inputs:
       llmcpp-ref:
.github/workflows/llm_performance_tests.yml (vendored): 131 lines changed
@@ -70,12 +70,13 @@ on:
 
 # A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
-  # llm-cpp-build: # please uncomment it for PR tests
-  #   uses: ./.github/workflows/llm-binary-build.yml
+  llm-cpp-build:
+    uses: ./.github/workflows/llm-binary-build.yml
+    with: ${{ (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main')) && 'Windows,Linux' || 'Dummy' }}
 
   llm-performance-test-on-arc:
-    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (inputs.arc == 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-arc' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
-    # needs: llm-cpp-build # please uncomment it for PR tests
+    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (github.event.inputs.arc == 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-arc' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
+    needs: llm-cpp-build
     strategy:
       fail-fast: false
       matrix:
@@ -85,7 +86,7 @@ jobs:
       OMP_NUM_THREADS: 16
       THREAD_NUM: 16
       ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
-      CSV_SAVE_PATH: ${{ (github.event.schedule || github.event_name == 'workflow_dispatch') && '/mnt/disk1/nightly_perf_gpu/' || '/mnt/disk1/pr_perf_gpu/' }}
+      CSV_SAVE_PATH: ${{ (github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main'))) && '/mnt/disk1/nightly_perf_gpu/' || '/mnt/disk1/pr_perf_gpu/' }}
 
     steps:
       - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # actions/checkout@v3
@@ -111,16 +112,19 @@ jobs:
           python -m pip install --upgrade transformers_stream_generator
           python -m pip install --upgrade tiktoken
 
-      # please uncomment it and comment the "Install IPEX-LLM from Pypi" part for PR tests
-      # - name: Download llm binary
-      #   uses: ./.github/actions/llm/download-llm-binary
+      # specific for test on certain commits
+      - name: Download llm binary
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/download-llm-binary
 
-      # - name: Run LLM install (all) test
-      #   uses: ./.github/actions/llm/setup-llm-env
-      #   with:
-      #     extra-dependency: "xpu_2.1"
+      - name: Install IPEX-LLM from source
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/setup-llm-env
+        with:
+          extra-dependency: "xpu_2.1"
 
       - name: Install IPEX-LLM from Pypi
+        if: ${{ github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main')) }}
         shell: bash
         run: |
           pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
@@ -339,8 +343,8 @@ jobs:
 
 
   llm-performance-test-on-spr:
-    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (inputs.spr == 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-spr' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
-    # needs: llm-cpp-build # please uncomment it for PR tests
+    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (github.event.inputs.spr == 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-spr' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
+    needs: llm-cpp-build
     strategy:
       fail-fast: false
      matrix:
@@ -372,14 +376,17 @@ jobs:
           python -m pip install --upgrade tiktoken
           python -m pip install --upgrade transformers_stream_generator
 
-      # please uncomment it and comment the "Install IPEX-LLM from Pypi" part for PR tests
-      # - name: Download llm binary
-      #   uses: ./.github/actions/llm/download-llm-binary
+      # specific for test on certain commits
+      - name: Download llm binary
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/download-llm-binary
 
-      # - name: Run LLM install (all) test
-      #   uses: ./.github/actions/llm/setup-llm-env
+      - name: Install IPEX-LLM from source
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/setup-llm-env
 
       - name: Install IPEX-LLM from Pypi
+        if: ${{ github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main')) }}
         shell: bash
         run: |
           pip install --pre --upgrade ipex-llm[all] --extra-index-url https://download.pytorch.org/whl/cpu
@@ -414,8 +421,8 @@ jobs:
           done
 
   llm-performance-test-on-core:
-    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (inputs.core== 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-core' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
-    # needs: llm-cpp-build # please uncomment it for PR tests
+    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (github.event.inputs.core == 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-core' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
+    needs: llm-cpp-build
     strategy:
       fail-fast: false
       matrix:
@@ -429,7 +436,7 @@ jobs:
     runs-on: [self-hosted, "${{ matrix.os }}", llm, perf-core, "${{ matrix.platform }}"]
     env:
       ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
-      CSV_SAVE_PATH: ${{ (github.event.schedule || github.event_name == 'workflow_dispatch') && 'D:/action-runners/nightly_perf_core_' || 'D:/action-runners/pr_perf_core_' }}${{ matrix.platform }}/
+      CSV_SAVE_PATH: ${{ (github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main'))) && 'D:/action-runners/nightly_perf_core_' || 'D:/action-runners/pr_perf_core_' }}${{ matrix.platform }}/
     steps:
       - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # actions/checkout@v3
         with:
@@ -449,14 +456,17 @@ jobs:
           python -m pip install --upgrade omegaconf pandas
           python -m pip install --upgrade tiktoken einops transformers_stream_generator
 
-      # please uncomment it and comment the "Install IPEX-LLM from Pypi" part for PR tests
-      # - name: Download llm binary
-      #   uses: ./.github/actions/llm/download-llm-binary
+      # specific for test on certain commits
+      - name: Download llm binary
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/download-llm-binary
 
-      # - name: Run LLM install (all) test
-      #   uses: ./.github/actions/llm/setup-llm-env
+      - name: Install IPEX-LLM from source
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/setup-llm-env
 
       - name: Install IPEX-LLM from Pypi
+        if: ${{ github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main')) }}
         shell: bash
         run: |
           pip install --pre --upgrade ipex-llm[all]
@@ -490,8 +500,8 @@ jobs:
           fi
 
   llm-performance-test-on-igpu:
-    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (inputs.igpu== 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-igpu' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
-    # needs: llm-cpp-build # please uncomment it for PR tests
+    if: ${{ github.event.schedule || ( github.event_name == 'workflow_dispatch' && (github.event.inputs.igpu == 'true')) || github.event.inputs.artifact == 'llm-performance-test-on-igpu' || github.event.inputs.artifact == 'all' }} # please comment it for PR tests
+    needs: llm-cpp-build
     strategy:
       fail-fast: false
       matrix:
@@ -509,47 +519,52 @@ jobs:
 
       # TODO: Put the ipex-llm related install process for win gpu into a action function
 
-      # Please uncomment it and commment the install from pypi for PR tests
-      # - name: Download llm binary
-      #   uses: ./.github/actions/llm/download-llm-binary
+      # specific for test on certain commits
+      - name: Download llm binary
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        uses: ./.github/actions/llm/download-llm-binary
 
-      # - name: Prepare for install ipex-llm from source
-      #   shell: bash
-      #   run: |
-      #     sed -i 's/"bigdl-core-xe-21==" + CORE_XE_VERSION/"bigdl-core-xe-21"/g' python/llm/setup.py
-      #     sed -i 's/"bigdl-core-xe-batch-21==" + CORE_XE_VERSION/"bigdl-core-xe-batch-21"/g' python/llm/setup.py
-      #     sed -i 's/"bigdl-core-xe-addons-21==" + CORE_XE_VERSION/"bigdl-core-xe-addons-21"/g' python/llm/setup.py
-      #     sed -i 's/"bigdl-core-xe-esimd-21==" + CORE_XE_VERSION/"bigdl-core-xe-esimd-21"/g' python/llm/setup.py
+      - name: Prepare for install ipex-llm from source
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        shell: bash
+        run: |
+          sed -i 's/"bigdl-core-xe-21==" + CORE_XE_VERSION/"bigdl-core-xe-21"/g' python/llm/setup.py
+          sed -i 's/"bigdl-core-xe-batch-21==" + CORE_XE_VERSION/"bigdl-core-xe-batch-21"/g' python/llm/setup.py
+          sed -i 's/"bigdl-core-xe-addons-21==" + CORE_XE_VERSION/"bigdl-core-xe-addons-21"/g' python/llm/setup.py
+          sed -i 's/"bigdl-core-xe-esimd-21==" + CORE_XE_VERSION/"bigdl-core-xe-esimd-21"/g' python/llm/setup.py
 
-      # - name: Install ipex-llm and other related packages (install from source)
-      #   shell: cmd
-      #   run: |
-      #     call conda create -n igpu-perf python=${{ matrix.python-version }} libuv -y
-      #     call conda activate igpu-perf
+      - name: Install ipex-llm and other related packages (install from source)
+        if: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref != 'main') }}
+        shell: cmd
+        run: |
+          call conda create -n igpu-perf python=${{ matrix.python-version }} libuv -y
+          call conda activate igpu-perf
 
-      #     pip install --upgrade pip
-      #     pip install --upgrade wheel
-      #     pip install --upgrade omegaconf pandas
-      #     pip install --upgrade tiktoken einops transformers_stream_generator
+          pip install --upgrade pip
+          pip install --upgrade wheel
+          pip install --upgrade omegaconf pandas
+          pip install --upgrade tiktoken einops transformers_stream_generator
 
-      #     cd python\llm
-      #     python setup.py clean --all bdist_wheel --win
-      #     if not exist dist\ipex_llm*.whl (exit /b 1)
-      #     for %%i in (dist\ipex_llm*.whl) do set whl_name=%%i
+          cd python\llm
+          python setup.py clean --all bdist_wheel --win
+          if not exist dist\ipex_llm*.whl (exit /b 1)
+          for %%i in (dist\ipex_llm*.whl) do set whl_name=%%i
 
-      #     pip install --pre --upgrade %whl_name%[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
-      #     if %ERRORLEVEL% neq 0 (exit /b 1)
-      #     pip list
+          pip install --pre --upgrade %whl_name%[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
+          if %ERRORLEVEL% neq 0 (exit /b 1)
+          pip list
 
-      #     call conda deactivate
+          call conda deactivate
 
       - name: Determine desired ipex-llm version
+        if: ${{ github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main')) }}
         shell: bash
         run: |
           test_version_date=`date -d 'yesterday' '+%Y%m%d'`
           echo "TEST_VERSION_DATE=${test_version_date}" >> "$GITHUB_ENV"
 
       - name: Install ipex-llm and other related packages (install from pypi)
+        if: ${{ github.event.schedule || (github.event_name == 'workflow_dispatch' && (github.event.inputs.checkout-ref == 'main')) }}
         shell: cmd
         run: |
           call conda create -n igpu-perf python=${{ matrix.python-version }} libuv -y
@@ -585,7 +600,9 @@ jobs:
       - name: Set directory envs & and fix generated csv date name
         shell: bash
         run: |
-          if [ ${{ github.event_name }} == "schedule" ] || [ ${{ github.event_name }} == "workflow_dispatch" ]; then
+          if [[ ${{ github.event_name }} == "schedule" ]]; then
             echo "CSV_SAVE_PATH=${CSV_NIGHTLY_PATH}" >> "$GITHUB_ENV"
+          elif [[ ${{ github.event_name }} == "workflow_dispatch" ]] && [[ ${{ github.event.inputs.checkout-ref }} == "main" ]]; then
+            echo "CSV_SAVE_PATH=${CSV_NIGHTLY_PATH}" >> "$GITHUB_ENV"
           else
             echo "CSV_SAVE_PATH=${CSV_PR_PATH}" >> "$GITHUB_ENV"