Separate llmcpp build of linux and windows (#10136)

* separate linux windows llmcpp build

* harness run on linux only

* fix platform

* skip error

* change to linux only build

* add judgement of platform

* add download args

* remove `|| true`
This commit is contained in:
Chen, Zhentao 2024-02-26 15:04:29 +08:00 committed by GitHub
parent ea23afc8ec
commit 5ad752bae8
6 changed files with 51 additions and 11 deletions

View file

@@ -1,6 +1,11 @@
name: Download LLM binary files
description: Download built binary files from github artifact
inputs:
platform:
description: 'Platforms to built on'
default: 'Windows,Linux'
required: false
type: string
runs:
using: "composite"
steps:
@@ -11,14 +16,18 @@ runs:
run: |
rm -rf python/llm/llm-binary || true
mkdir -p python/llm/llm-binary
mv linux-avx2/* python/llm/llm-binary/
mv linux-avx512/* python/llm/llm-binary/
mv linux-avxvnni/* python/llm/llm-binary/
mv linux-avx/* python/llm/llm-binary/
mv linux-amx/* python/llm/llm-binary/
mv windows-avx2/* python/llm/llm-binary/
mv windows-avx-vnni/* python/llm/llm-binary/
mv windows-avx/* python/llm/llm-binary/
if ${{contains(inputs.platform, 'Linux')}}; then
mv linux-avx2/* python/llm/llm-binary/
mv linux-avx512/* python/llm/llm-binary/
mv linux-avxvnni/* python/llm/llm-binary/
mv linux-avx/* python/llm/llm-binary/
mv linux-amx/* python/llm/llm-binary/
fi
if ${{contains(inputs.platform, 'Windows')}}; then
mv windows-avx2/* python/llm/llm-binary/
mv windows-avx-vnni/* python/llm/llm-binary/
mv windows-avx/* python/llm/llm-binary/
fi
rm -rf linux-avx2 || true
rm -rf linux-avx512 || true
rm -rf linux-avxvnni || true

View file

@@ -26,6 +26,11 @@ on:
default: ''
required: false
type: string
platform:
description: 'Platforms to built on'
default: '["Windows", "Linux"]'
required: false
type: string
workflow_call:
inputs:
llmcpp-ref:
@@ -33,10 +38,16 @@ on:
default: ''
required: false
type: string
platform:
description: 'Platforms to built on'
default: 'Windows,Linux'
required: false
type: string
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
check-linux-avxvnni-artifact:
if: ${{contains(inputs.platform, 'Linux')}}
runs-on: ubuntu-latest
outputs:
if-exists: ${{steps.check_artifact.outputs.exists}}
@@ -149,6 +160,7 @@ jobs:
conda remove -n python311 --all -y
check-linux-avx512-artifact:
if: ${{contains(inputs.platform, 'Linux')}}
runs-on: ubuntu-latest
outputs:
if-exists: ${{steps.check_artifact.outputs.exists}}
@@ -249,6 +261,7 @@ jobs:
conda remove -n python39 --all -y
check-linux-amx-artifact:
if: ${{contains(inputs.platform, 'Linux')}}
runs-on: ubuntu-latest
outputs:
if-exists: ${{steps.check_artifact.outputs.exists}}
@@ -319,6 +332,7 @@ jobs:
conda remove -n python39 --all -y
check-windows-avx2-artifact:
if: ${{contains(inputs.platform, 'Windows')}}
runs-on: ubuntu-latest
outputs:
if-exists: ${{steps.check_artifact.outputs.exists}}
@@ -363,6 +377,7 @@ jobs:
build/Release
check-windows-avx-vnni-artifact:
if: ${{contains(inputs.platform, 'Windows')}}
runs-on: ubuntu-latest
outputs:
if-exists: ${{steps.check_artifact.outputs.exists}}
@@ -472,6 +487,7 @@ jobs:
release
check-windows-avx-artifact:
if: ${{contains(inputs.platform, 'Windows')}}
runs-on: ubuntu-latest
outputs:
if-exists: ${{steps.check_artifact.outputs.exists}}

View file

@@ -34,6 +34,8 @@ on:
jobs:
llm-cpp-build:
uses: ./.github/workflows/llm-binary-build.yml
with:
platform: 'Linux'
# Set the testing matrix based on the event (schedule, PR, or manual dispatch)
set-matrix:
runs-on: ubuntu-latest
@@ -110,6 +112,8 @@ jobs:
- name: Download llm binary
uses: ./.github/actions/llm/download-llm-binary
with:
platform: 'Linux'
- name: Run LLM install (all) test
uses: ./.github/actions/llm/setup-llm-env

View file

@@ -42,6 +42,8 @@ on:
jobs:
llm-cpp-build:
uses: ./.github/workflows/llm-binary-build.yml
with:
platform: 'Linux'
# Set the testing matrix based on the event (schedule, PR, or manual dispatch)
set-matrix:
runs-on: ubuntu-latest
@@ -132,6 +134,8 @@ jobs:
- name: Download llm binary
uses: ./.github/actions/llm/download-llm-binary
with:
platform: 'Linux'
- name: Run LLM install (all) test
uses: ./.github/actions/llm/setup-llm-env

View file

@@ -42,6 +42,8 @@ on:
jobs:
llm-cpp-build:
uses: ./.github/workflows/llm-binary-build.yml
with:
platform: 'Linux'
set-matrix:
runs-on: ubuntu-latest
outputs:
@@ -128,6 +130,8 @@ jobs:
- name: Download llm binary
uses: ./.github/actions/llm/download-llm-binary
with:
platform: 'Linux'
- name: Run LLM install (all) test
uses: ./.github/actions/llm/setup-llm-env

View file

@@ -23,7 +23,8 @@ on:
jobs:
llm-cpp-build:
uses: ./.github/workflows/llm-binary-build.yml
with:
platform: 'Linux'
llm-perf-regression-test-on-arc:
needs: llm-cpp-build
strategy:
@@ -60,6 +61,8 @@ jobs:
- name: Download llm binary
uses: ./.github/actions/llm/download-llm-binary
with:
platform: 'Linux'
- name: Run LLM install (all) test
uses: ./.github/actions/llm/setup-llm-env