From e717e304a61f456021ece454484886365e08aff3 Mon Sep 17 00:00:00 2001
From: Song Jiaming
Date: Thu, 10 Aug 2023 10:03:11 +0800
Subject: [PATCH] LLM first example test and template (#8658)

---
 .github/workflows/llm_example_tests.yml  | 70 ++++++++++++++++++++++++
 python/llm/dev/test/run-example-tests.sh | 18 ++++++
 2 files changed, 88 insertions(+)
 create mode 100644 .github/workflows/llm_example_tests.yml
 create mode 100644 python/llm/dev/test/run-example-tests.sh

diff --git a/.github/workflows/llm_example_tests.yml b/.github/workflows/llm_example_tests.yml
new file mode 100644
index 00000000..e8f0ba20
--- /dev/null
+++ b/.github/workflows/llm_example_tests.yml
@@ -0,0 +1,70 @@
+name: LLM Example Test
+
+# Cancel previous runs in the PR when you push new commits
+concurrency:
+  group: ${{ github.workflow }}-llm-example-tests-${{ github.event.pull_request.number || github.run_id }}
+  cancel-in-progress: true
+
+# Controls when the action will run.
+on:
+  schedule:
+    - cron: '00 13 * * *' # GMT time, 13:00 GMT == 21:00 China
+  pull_request:
+    branches: [ main ]
+    paths:
+      - '.github/workflows/llm_example_tests.yml'
+      - '.github/workflows/llm-binary-build.yml'
+      - '.github/actions/llm/example-test/action.yml'
+      - '.github/actions/llm/setup-llm-env/action.yml'
+      - '.github/actions/llm/remove-llm-env/action.yml'
+      - '.github/actions/llm/download-llm-binary/action.yml'
+  workflow_dispatch:
+  workflow_call:
+
+env:
+  INT4_CKPT_DIR: ./llm/ggml-actions/stable
+  LLM_DIR: ./llm
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+  llm-cpp-build:
+    uses: ./.github/workflows/llm-binary-build.yml
+  llm-example-test:
+    needs: llm-cpp-build
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.9"]
+        instruction: ["AVX512"]
+    runs-on: [ self-hosted, llm, "${{matrix.instruction}}", ubuntu-20.04-lts ]
+    env:
+      THREAD_NUM: 24
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install --upgrade setuptools==58.0.4
+          python -m pip install --upgrade wheel
+
+      - name: Download llm binary
+        uses: ./.github/actions/llm/download-llm-binary
+
+      - name: Run LLM install (all) test
+        uses: ./.github/actions/llm/setup-llm-env
+        env:
+          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
+
+      - name: Run LLM example test
+        uses: ./.github/actions/llm/example-test
+        env:
+          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
+
+      # - name: Clean up test environment
+      #   uses: ./.github/actions/llm/remove-llm-env
+      #   env:
+      #     ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
diff --git a/python/llm/dev/test/run-example-tests.sh b/python/llm/dev/test/run-example-tests.sh
new file mode 100644
index 00000000..33cb6f91
--- /dev/null
+++ b/python/llm/dev/test/run-example-tests.sh
@@ -0,0 +1,18 @@
+# LLAMA2 example test
+if [ ! -d $ORIGINAL_LLAMA2_PATH ]; then
+  echo "Directory $ORIGINAL_LLAMA2_PATH not found. Downloading from FTP server..."
+  wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/${ORIGINAL_LLAMA2_PATH:2} -P $LLM_DIR
+fi
+
+if [ -z "$THREAD_NUM" ]; then
+  THREAD_NUM=2
+fi
+export OMP_NUM_THREADS=$THREAD_NUM
+
+std=$(taskset -c 0-$((THREAD_NUM - 1)) python python/llm/example/transformers/transformers_int4/llama2/generate.py --repo-id-or-model-path $ORIGINAL_LLAMA2_PATH)
+echo "the output of the example is: "
+echo $std
+if [[ ! $std == *"AI is a term"* ]]; then
+  echo "The expected output is not met."
+  return 1
+fi