[LLM] Refactor LLM Linux tests (#8349)

* Small name fix

* Add convert nightly tests, and for other llm tests, use stable ckpt

* Small fix and ftp fix

* Small fix

* Small fix
Yuwen Hu, 2023-06-16 15:22:48 +08:00 (committed by GitHub)
parent 9daf543e2f
commit 1aa33d35d5
8 changed files with 151 additions and 58 deletions

.github/workflows/llm-nightly-test.yml (new file, 87 lines)

@@ -0,0 +1,87 @@
name: LLM Nightly Tests

# Controls when the action will run.
on:
  schedule:
    - cron: '00 13 * * *' # GMT time, 13:00 GMT == 21:00 China
  pull_request:
    branches: [ main ]
    paths:
      - '.github/workflows/llm-nightly-test.yml'
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  llm-nightly-convert-test:
    runs-on: [ self-hosted, Gondolin, ubuntu-20.04-lts ]
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
    env:
      ORIGIN_DIR: ./llm/models
      LLAMA_ORIGIN_PATH: ./llm/models/llama-7b-hf
      GPTNEOX_ORIGIN_PATH: ./llm/models/gptneox-7b-redpajama-bf16
      BLOOM_ORIGIN_PATH: ./llm/models/bloomz-7b1
      INT4_CKPT_DIR: ./llm/ggml-actions/nightly
      LLAMA_INT4_CKPT_PATH: ./llm/ggml-actions/nightly/bigdl_llm_llama_q4_0.bin
      GPTNEOX_INT4_CKPT_PATH: ./llm/ggml-actions/nightly/bigdl_llm_gptneox_q4_0.bin
      BLOOM_INT4_CKPT_PATH: ./llm/ggml-actions/nightly/bigdl_llm_bloom_q4_0.bin
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade setuptools==58.0.4
          python -m pip install --upgrade wheel
      - name: Download original models
        run: |
          llm_ftp_user=${RUNNER_REPOSITORY_URL:35}
          llm_ftp_user=$(echo $llm_ftp_user | tr '[:upper:]' '[:lower:]')
          llm_ftp_url=${FTP_URI:0:6}${llm_ftp_user}${FTP_URI:9}:8821
          if [ ! -d $LLAMA_ORIGIN_PATH ]; then
            echo "Directory $LLAMA_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $llm_ftp_url/llm/llama-7b-hf -P $ORIGIN_DIR
          fi
          if [ ! -d $GPTNEOX_ORIGIN_PATH ]; then
            echo "Directory $GPTNEOX_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $llm_ftp_url/llm/gptneox-7b-redpajama-bf16 -P $ORIGIN_DIR
          fi
          if [ ! -d $BLOOM_ORIGIN_PATH ]; then
            echo "Directory $BLOOM_ORIGIN_PATH not found. Downloading from FTP server..."
            wget -r -nH --no-verbose --cut-dirs=1 $llm_ftp_url/llm/bloomz-7b1 -P $ORIGIN_DIR
          fi
      - name: Run LLM convert test
        shell: bash
        run: |
          $CONDA_HOME/bin/conda env remove -y -n bigdl-init-llm || true
          $CONDA_HOME/bin/conda create -n bigdl-init-llm -y python==3.9 setuptools==58.0.4 -c ${GONDOLIN_CONDA_CHANNEL} --override-channels
          source $CONDA_HOME/bin/activate bigdl-init-llm
          $CONDA_HOME/bin/conda info
          pip install requests
          bash python/llm/dev/release_default_linux.sh default false
          whl_name=$(ls python/llm/dist)
          pip install -i https://pypi.python.org/simple "python/llm/dist/${whl_name}[all]"
          pip install pytest
          bash python/llm/test/run-llm-convert-tests.sh
          source $CONDA_HOME/bin/deactivate
          $CONDA_HOME/bin/conda remove -n bigdl-init-llm --all
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
      - name: Upload ckpt to ftp
        run: |
          apt-get install tnftp
          llm_ftp_user=${RUNNER_REPOSITORY_URL:35}
          llm_ftp_user=$(echo $llm_ftp_user | tr '[:upper:]' '[:lower:]')
          llm_ftp_url=${FTP_URI:0:6}${llm_ftp_user}${FTP_URI:9}:8821
          tnftp -u ${llm_ftp_url}/${INT4_CKPT_DIR:1}/bigdl_llm_llama_7b_q4_0.bin $LLAMA_INT4_CKPT_PATH
          tnftp -u ${llm_ftp_url}/${INT4_CKPT_DIR:1}/bigdl_llm_redpajama_7b_q4_0.bin $GPTNEOX_INT4_CKPT_PATH
          tnftp -u ${llm_ftp_url}/${INT4_CKPT_DIR:1}/bigdl_llm_bloom_7b_q4_0.bin $BLOOM_INT4_CKPT_PATH
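Note: the llm_ftp_user / llm_ftp_url lines above rely on bash substring expansion and are easy to misread. Below is a minimal standalone sketch of what they compute, under the assumption (not stated in the commit) that RUNNER_REPOSITORY_URL is a GitHub repository URL whose repository name starts at character offset 35, and that FTP_URI is an ftp:// URI whose first three host characters are replaced by the runner user; both example values are hypothetical.

#!/bin/bash
RUNNER_REPOSITORY_URL="https://github.com/intel-analytics/BigDL"  # hypothetical value
FTP_URI="ftp://xxx.example.com"                                   # hypothetical value

llm_ftp_user=${RUNNER_REPOSITORY_URL:35}                          # substring from offset 35 -> "BigDL"
llm_ftp_user=$(echo $llm_ftp_user | tr '[:upper:]' '[:lower:]')   # lowercase -> "bigdl"
llm_ftp_url=${FTP_URI:0:6}${llm_ftp_user}${FTP_URI:9}:8821        # "ftp://" + "bigdl" + ".example.com" + ":8821"
echo $llm_ftp_url                                                 # ftp://bigdl.example.com:8821

The tnftp -u <url> <local-file> calls then upload each freshly converted checkpoint to the nightly directory on that server; ${INT4_CKPT_DIR:1} strips the leading "." from the local path to form the remote path.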

.github/workflows/… (LLM Linux unit tests workflow)

@@ -1,4 +1,4 @@
-name: LLM Unit Tests Basic on Linux
+name: LLM Unit Tests on Linux

# Cancel previous runs in the PR when you push new commits
concurrency:
@@ -28,14 +28,10 @@ jobs:
      matrix:
        python-version: ["3.9"]
    env:
-      ORIGIN_DIR: ./llm/models
-      LLAMA_ORIGIN_PATH: ./llm/models/llama-7b-hf
-      GPTNEOX_ORIGIN_PATH: ./llm/models/gptneox-7b-redpajama-bf16
-      BLOOM_ORIGIN_PATH: ./llm/models/bloomz-7b1
-      INT4_CKPT_DIR: ./llm/ggml
-      LLAMA_INT4_CKPT_PATH: ./llm/ggml/bigdl_llm_llama_q4_0.bin
-      GPTNEOX_INT4_CKPT_PATH: ./llm/ggml/bigdl_llm_gptneox_q4_0.bin
-      BLOOM_INT4_CKPT_PATH: ./llm/ggml/bigdl_llm_bloom_q4_0.bin
+      INT4_CKPT_DIR: ./llm/ggml-actions/stable
+      LLAMA_INT4_CKPT_PATH: ./llm/ggml-actions/stable/bigdl_llm_llama_7b_q4_0.bin
+      GPTNEOX_INT4_CKPT_PATH: ./llm/ggml-actions/stable/bigdl_llm_redpajama_7b_q4_0.bin
+      BLOOM_INT4_CKPT_PATH: ./llm/ggml-actions/stable/bigdl_llm_bloom_7b_q4_0.bin
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
@@ -62,26 +58,8 @@ jobs:
          $CONDA_HOME/bin/conda remove -n bigdl-init-llm --all
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
-      - name: Download original models
-        run: |
-          llm_ftp_user=${RUNNER_REPOSITORY_URL:35}
-          llm_ftp_user=$(echo $llm_ftp_user | tr '[:upper:]' '[:lower:]')
-          llm_ftp_url=${FTP_URI:0:6}${llm_ftp_user}${FTP_URI:9}:8821
-          if [ ! -d $LLAMA_ORIGIN_PATH ]; then
-            echo "Directory $LLAMA_ORIGIN_PATH not found. Downloading from FTP server..."
-            wget -r -nH --no-verbose --cut-dirs=1 $llm_ftp_url/llm/llama-7b-hf -P $ORIGIN_DIR
-          fi
-          if [ ! -d $GPTNEOX_ORIGIN_PATH ]; then
-            echo "Directory $GPTNEOX_ORIGIN_PATH not found. Downloading from FTP server..."
-            wget -r -nH --no-verbose --cut-dirs=1 $llm_ftp_url/llm/gptneox-7b-redpajama-bf16 -P $ORIGIN_DIR
-          fi
-          if [ ! -d $BLOOM_ORIGIN_PATH ]; then
-            echo "Directory $BLOOM_ORIGIN_PATH not found. Downloading from FTP server..."
-            wget -r -nH --no-verbose --cut-dirs=1 $llm_ftp_url/llm/bloomz-7b1 -P $ORIGIN_DIR
-          fi
-      - name: Run LLM basic test (native install & convert)
+      - name: Run LLM install (all) test
        shell: bash
        run: |
          $CONDA_HOME/bin/conda env remove -y -n bigdl-init-llm || true
@@ -93,10 +71,28 @@ jobs:
          whl_name=$(ls python/llm/dist)
          pip install -i https://pypi.python.org/simple "python/llm/dist/${whl_name}[all]"
          pip install pytest
-          bash python/llm/test/run-llm-basic-tests.sh
+          bash python/llm/test/run-llm-install-tests.sh
          source $CONDA_HOME/bin/deactivate
          $CONDA_HOME/bin/conda remove -n bigdl-init-llm --all
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
+      - name: Download ckpt models
+        run: |
+          llm_ftp_user=${RUNNER_REPOSITORY_URL:35}
+          llm_ftp_user=$(echo $llm_ftp_user | tr '[:upper:]' '[:lower:]')
+          llm_ftp_url=${FTP_URI:0:6}${llm_ftp_user}${FTP_URI:9}:8821
+          if [ ! -d $LLAMA_INT4_CKPT_PATH ]; then
+            echo "Directory $LLAMA_INT4_CKPT_PATH not found. Downloading from FTP server..."
+            wget --no-verbose $llm_ftp_url/${LLAMA_INT4_CKPT_PATH:1} -P $INT4_CKPT_DIR
+          fi
+          if [ ! -d $GPTNEOX_INT4_CKPT_PATH ]; then
+            echo "Directory $GPTNEOX_INT4_CKPT_PATH not found. Downloading from FTP server..."
+            wget --no-verbose $llm_ftp_url/${GPTNEOX_INT4_CKPT_PATH:1} -P $INT4_CKPT_DIR
+          fi
+          if [ ! -d $BLOOM_INT4_CKPT_PATH ]; then
+            echo "Directory $BLOOM_INT4_CKPT_PATH not found. Downloading from FTP server..."
+            wget --no-verbose $llm_ftp_url/${BLOOM_INT4_CKPT_PATH:1} -P $INT4_CKPT_DIR
+          fi
+      # new test steps should be added here
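One caveat in the new "Download ckpt models" step: the three checkpoint paths name .bin files, yet each guard uses [ ! -d ... ], which tests for a directory and is therefore always true for a regular file, so the checkpoints would be re-downloaded on every run. A hedged sketch of the same logic with a file test instead, factored into a helper; fetch_ckpt is illustrative only, not part of this commit:

#!/bin/bash
# Hypothetical helper: download an INT4 checkpoint only if it is missing.
# Assumes llm_ftp_url is already set, as in the workflow step above.
fetch_ckpt() {
    local ckpt_path=$1   # e.g. $LLAMA_INT4_CKPT_PATH
    local dest_dir=$2    # e.g. $INT4_CKPT_DIR
    if [ ! -f "$ckpt_path" ]; then   # -f tests for a regular file; -d never matches a .bin file
        echo "File $ckpt_path not found. Downloading from FTP server..."
        wget --no-verbose "$llm_ftp_url/${ckpt_path:1}" -P "$dest_dir"
    fi
}

fetch_ckpt "$LLAMA_INT4_CKPT_PATH" "$INT4_CKPT_DIR"
fetch_ckpt "$GPTNEOX_INT4_CKPT_PATH" "$INT4_CKPT_DIR"
fetch_ckpt "$BLOOM_INT4_CKPT_PATH" "$INT4_CKPT_DIR"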

.github/workflows/… (LLM Windows unit tests workflow)

@@ -1,4 +1,4 @@
-name: LLM Unit Tests Basic on Windows
+name: LLM Unit Tests on Windows

# Cancel previous runs in the PR when you push new commits
concurrency:
@@ -49,7 +49,7 @@ jobs:
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
-      - name: Run LLM basic test (naive installation)
+      - name: Run LLM install (all) test
        shell: bash
        run: |
          pip install requests
@@ -57,6 +57,6 @@ jobs:
          whl_name=$(ls python/llm/dist)
          pip install -i https://pypi.python.org/simple "python/llm/dist/${whl_name}[all]"
          pip install pytest
-          bash python/llm/test/run-llm-basic-tests.sh windows
+          bash python/llm/test/run-llm-install-tests.sh
        env:
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}

python/llm/test/run-llm-basic-tests.sh (deleted)

@@ -1,26 +0,0 @@
#!/bin/bash

export ANALYTICS_ZOO_ROOT=${ANALYTICS_ZOO_ROOT}
export LLM_HOME=${ANALYTICS_ZOO_ROOT}/python/llm/src
export LLM_BASIC_TEST_DIR=${ANALYTICS_ZOO_ROOT}/python/llm/test/basic

set -e

echo "# Start testing"
start=$(date "+%s")

echo "test install"
python -m pytest -s ${LLM_BASIC_TEST_DIR}/install

# TODO: supports tests on windows
platform=$1
if [[ $1 != "windows" ]]; then
  echo "test convert model"
  python -m pytest -s ${LLM_BASIC_TEST_DIR}/convert
fi

now=$(date "+%s")
time=$((now-start))

echo "Bigdl-llm tests finished"
echo "Time used:$time seconds"

python/llm/test/run-llm-convert-tests.sh (new file)

@@ -0,0 +1,18 @@
#!/bin/bash

export ANALYTICS_ZOO_ROOT=${ANALYTICS_ZOO_ROOT}
export LLM_HOME=${ANALYTICS_ZOO_ROOT}/python/llm/src
export LLM_CONVERT_TEST_DIR=${ANALYTICS_ZOO_ROOT}/python/llm/test/convert

set -e

echo "# Start testing convert"
start=$(date "+%s")

python -m pytest -s ${LLM_CONVERT_TEST_DIR}

now=$(date "+%s")
time=$((now-start))

echo "Bigdl-llm tests finished"
echo "Time used:$time seconds"

python/llm/test/run-llm-install-tests.sh (new file)

@@ -0,0 +1,18 @@
#!/bin/bash

export ANALYTICS_ZOO_ROOT=${ANALYTICS_ZOO_ROOT}
export LLM_HOME=${ANALYTICS_ZOO_ROOT}/python/llm/src
export LLM_INSTALL_TEST_DIR=${ANALYTICS_ZOO_ROOT}/python/llm/test/install

set -e

echo "# Start testing install"
start=$(date "+%s")

python -m pytest -s ${LLM_INSTALL_TEST_DIR}

now=$(date "+%s")
time=$((now-start))

echo "Bigdl-llm tests finished"
echo "Time used:$time seconds"