LLM first example test and template (#8658)

This commit is contained in:
Song Jiaming 2023-08-10 10:03:11 +08:00 committed by GitHub
parent 1a7b698a83
commit e717e304a6
2 changed files with 88 additions and 0 deletions

70
.github/workflows/llm_example_tests.yml vendored Normal file
View file

@ -0,0 +1,70 @@
name: LLM Example Test

# Cancel previous runs in the PR when you push new commits
concurrency:
  group: ${{ github.workflow }}-llm-example-tests-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

# Controls when the action will run.
on:
  schedule:
    - cron: '00 13 * * *'  # GMT time, 13:00 GMT == 21:00 China
  pull_request:
    branches: [main]
    # Only run on PRs that touch this workflow or the actions it depends on.
    paths:
      - '.github/workflows/llm_example_tests.yml'
      - '.github/workflows/llm-binary-build.yml'
      - '.github/actions/llm/example-test/action.yml'
      - '.github/actions/llm/setup-llm-env/action.yml'
      - '.github/actions/llm/remove-llm-env/action.yml'
      - '.github/actions/llm/download-llm-binary/action.yml'
  workflow_dispatch:
  workflow_call:

env:
  INT4_CKPT_DIR: ./llm/ggml-actions/stable
  LLM_DIR: ./llm

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # Build the llm C++ binaries first; the example test downloads them.
  llm-cpp-build:
    uses: ./.github/workflows/llm-binary-build.yml
  llm-example-test:
    needs: llm-cpp-build
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
        instruction: ["AVX512"]
    runs-on: [self-hosted, llm, "${{ matrix.instruction }}", ubuntu-20.04-lts]
    env:
      THREAD_NUM: "24"
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade setuptools==58.0.4
          python -m pip install --upgrade wheel
      - name: Download llm binary
        uses: ./.github/actions/llm/download-llm-binary
      - name: Run LLM install (all) test
        uses: ./.github/actions/llm/setup-llm-env
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
      - name: Run LLM example test
        uses: ./.github/actions/llm/example-test
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
      # - name: Clean up test environment
      #   uses: ./.github/actions/llm/remove-llm-env
      #   env:
      #     ANALYTICS_ZOO_ROOT: ${{ github.workspace }}

View file

@ -0,0 +1,18 @@
# LLAMA2 example test
#
# Downloads the original LLAMA2 model from the FTP mirror when it is not
# already on disk, runs the transformers-int4 llama2 generate example pinned
# to $THREAD_NUM cores, and fails unless the output contains the expected
# phrase.
#
# Required env vars: ORIGINAL_LLAMA2_PATH, LLM_FTP_URL, LLM_DIR
# Optional env vars: THREAD_NUM (defaults to 2)
if [ ! -d "$ORIGINAL_LLAMA2_PATH" ]; then
  echo "Directory $ORIGINAL_LLAMA2_PATH not found. Downloading from FTP server..."
  # ${ORIGINAL_LLAMA2_PATH:2} strips the leading "./" so the remote path is
  # relative; -nH/--cut-dirs=1 drop the host and first remote directory so
  # the tree lands directly under $LLM_DIR.
  wget -r -nH --no-verbose --cut-dirs=1 "$LLM_FTP_URL/${ORIGINAL_LLAMA2_PATH:2}" -P "$LLM_DIR"
fi

# Default to 2 threads when the caller did not set THREAD_NUM.
if [ -z "$THREAD_NUM" ]; then
  THREAD_NUM=2
fi
export OMP_NUM_THREADS="$THREAD_NUM"

# Pin the example to cores 0..THREAD_NUM-1 and capture its stdout.
std=$(taskset -c 0-$((THREAD_NUM - 1)) python python/llm/example/transformers/transformers_int4/llama2/generate.py --repo-id-or-model-path "$ORIGINAL_LLAMA2_PATH")
echo "the output of the example is: "
# Quoted so newlines in the model output are preserved in the CI log and
# glob characters in the generated text are not expanded.
echo "$std"

# Fail when the generated text does not contain the expected phrase.
# NOTE(review): `return` only works when this file is sourced — presumably
# the example-test action sources it; confirm, else use `exit 1`.
if [[ ! $std == *"AI is a term"* ]]; then
  echo "The expected output is not met."
  return 1
fi