From 98798e98a1470209dad1fe21532be819c640a9c6 Mon Sep 17 00:00:00 2001
From: Jon Perry
Date: Thu, 25 Jul 2024 15:04:37 -0400
Subject: [PATCH] chore: split out e2e workflows by GenAI Model

---
 .github/workflows/e2e-llama.yaml              | 115 ++++++++++++++++++
 .../{e2e-1.yaml => e2e-playwright.yaml}       |  11 +-
 .../{e2e-2.yaml => e2e-text-embeddings.yaml}  |  42 -------
 .github/workflows/e2e-vllm.yaml               |  79 ++++++++++++
 .github/workflows/e2e-whisper.yaml            | 114 +++++++++++++++++
 5 files changed, 309 insertions(+), 52 deletions(-)
 create mode 100644 .github/workflows/e2e-llama.yaml
 rename .github/workflows/{e2e-1.yaml => e2e-playwright.yaml} (93%)
 rename .github/workflows/{e2e-2.yaml => e2e-text-embeddings.yaml} (70%)
 create mode 100644 .github/workflows/e2e-vllm.yaml
 create mode 100644 .github/workflows/e2e-whisper.yaml

diff --git a/.github/workflows/e2e-llama.yaml b/.github/workflows/e2e-llama.yaml
new file mode 100644
index 0000000000..4a3323c889
--- /dev/null
+++ b/.github/workflows/e2e-llama.yaml
@@ -0,0 +1,115 @@
+# End-to-end testing that deploys Supabase and the API, and deploys/tests llama-cpp-python
+
+name: e2e-llama
+on:
+  pull_request:
+    types:
+      - ready_for_review
+      - review_requested
+      - synchronize
+      - milestoned
+    paths:
+      # Catch-all
+      - "**"
+
+      # Ignore updates to the .github directory, unless it's this current file
+      - "!.github/**"
+      - ".github/workflows/e2e-llama.yaml"
+
+      # Ignore docs and website things
+      - "!**.md"
+      - "!docs/**"
+      - "!adr/**"
+      - "!website/**"
+      - "!netlify.toml"
+
+      # Ignore updates to generic github metadata files
+      - "!CODEOWNERS"
+      - "!.gitignore"
+      - "!LICENSE"
+
+      # Ignore local development files
+      - "!.pre-commit-config.yaml"
+
+      # Ignore non e2e tests
+      - "!tests/pytest/**"
+
+      # Ignore LFAI-UI things (for now?)
+      - "!src/leapfrogai_ui/**"
+      - "!packages/ui/**"
+
+      # Ignore changes to the repeater model
+      - "!packages/repeater/**"
+
+
+concurrency:
+  group: e2e-llama-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  e2e_llama:
+    runs-on: ai-ubuntu-big-boy-8-core
+    if: ${{ !github.event.pull_request.draft }}
+
+    steps:
+      - name: Checkout Repo
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+      - name: Setup Python
+        uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c #v5.0.0
+        with:
+          python-version-file: 'pyproject.toml'
+
+      - name: Install Python Deps
+        run: python -m pip install "."
+
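+      # NOTE: The uds-common setup action below is assumed to install the UDS/zarf tooling
+      #     : and log in to Iron Bank (registry1) with the robot credentials so hardened
+      #     : base images can be pulled during the builds and deploys that follow.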
+      - name: Setup UDS Environment
+        uses: defenseunicorns/uds-common/.github/actions/setup@05f42bb3117b66ebef8c72ae050b34bce19385f5
+        with:
+          username: ${{ secrets.IRON_BANK_ROBOT_USERNAME }}
+          password: ${{ secrets.IRON_BANK_ROBOT_PASSWORD }}
+
+      - name: Create UDS Cluster
+        run: |
+          uds deploy k3d-core-slim-dev:0.22.2 --confirm
+
+      ##########
+      # Supabase
+      ##########
+      - name: Deploy Supabase
+        run: |
+          make build-supabase LOCAL_VERSION=e2e-test
+          docker image prune -af
+          uds zarf tools kubectl create namespace leapfrogai
+          uds zarf package deploy packages/supabase/zarf-package-supabase-amd64-e2e-test.tar.zst --confirm
+          rm packages/supabase/zarf-package-supabase-amd64-e2e-test.tar.zst
+
+      - name: Set environment variable
+        id: set-env-var
+        run: |
+          echo "ANON_KEY=$(uds zarf tools kubectl get secret supabase-bootstrap-jwt -n leapfrogai -o jsonpath='{.data.anon-key}' | base64 -d)" >> "$GITHUB_ENV"
+
+      ##########
+      # API
+      ##########
+      - name: Deploy LFAI-API
+        run: |
+          make build-api LOCAL_VERSION=e2e-test
+          docker image prune -af
+          uds zarf package deploy packages/api/zarf-package-leapfrogai-api-amd64-e2e-test.tar.zst --confirm
+          rm packages/api/zarf-package-leapfrogai-api-amd64-e2e-test.tar.zst
+
+      ##########
+      # llama
+      ##########
+      - name: Deploy llama-cpp-python
+        run: |
+          make build-llama-cpp-python LOCAL_VERSION=e2e-test
+          docker image prune -af
+          uds zarf package deploy packages/llama-cpp-python/zarf-package-llama-cpp-python-amd64-e2e-test.tar.zst -l=trace --confirm
+          rm packages/llama-cpp-python/zarf-package-llama-cpp-python-amd64-e2e-test.tar.zst
+
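+      # NOTE: `requests` is installed ad hoc on the assumption that test_llama.py exercises
+      #     : the deployed API over HTTP and the package is not part of the base install.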
+      - name: Test llama-cpp-python
+        run: |
+          python -m pip install requests
+          python -m pytest ./tests/e2e/test_llama.py -v
diff --git a/.github/workflows/e2e-1.yaml b/.github/workflows/e2e-playwright.yaml
similarity index 93%
rename from .github/workflows/e2e-1.yaml
rename to .github/workflows/e2e-playwright.yaml
index 64da42bbdf..2eb57d40e2 100644
--- a/.github/workflows/e2e-1.yaml
+++ b/.github/workflows/e2e-playwright.yaml
@@ -1,6 +1,6 @@
 # End-to-end testing that deploys and tests Supabase, API, UI, and VLLM
 
-name: e2e-1
+name: e2e-playwright
 on:
   pull_request:
     types:
@@ -139,12 +139,3 @@ jobs:
       - name: Cleanup UI
         run: |
           uds zarf package remove leapfrogai-ui --confirm
-
-      ##########
-      # vLLM
-      # NOTE: We are not deploying and testing vLLM in this workflow because it requires a GPU
-      #     : This workflow simply verifies that the vLLM package can be built
-      ##########
-      - name: Build vLLM
-        run: |
-          make build-vllm LOCAL_VERSION=e2e-test
diff --git a/.github/workflows/e2e-2.yaml b/.github/workflows/e2e-text-embeddings.yaml
similarity index 70%
rename from .github/workflows/e2e-2.yaml
rename to .github/workflows/e2e-text-embeddings.yaml
index 3e5032a4f9..2ef57d3187 100644
--- a/.github/workflows/e2e-2.yaml
+++ b/.github/workflows/e2e-text-embeddings.yaml
@@ -99,25 +99,6 @@ jobs:
           uds zarf package deploy packages/api/zarf-package-leapfrogai-api-amd64-e2e-test.tar.zst --confirm
           rm packages/api/zarf-package-leapfrogai-api-amd64-e2e-test.tar.zst
 
-      ##########
-      # llama
-      ##########
-      - name: Deploy llama-cpp-python
-        run: |
-          make build-llama-cpp-python LOCAL_VERSION=e2e-test
-          docker image prune -af
-          uds zarf package deploy packages/llama-cpp-python/zarf-package-llama-cpp-python-amd64-e2e-test.tar.zst -l=trace --confirm
-          rm packages/llama-cpp-python/zarf-package-llama-cpp-python-amd64-e2e-test.tar.zst
-
-      - name: Test llama-cpp-python
-        run: |
-          python -m pip install requests
-          python -m pytest ./tests/e2e/test_llama.py -v
-
-      - name: Cleanup llama-cpp-python
-        run: |
-          uds zarf package remove llama-cpp-python -l=trace --confirm
-
       ##########
       # text-embeddings
       ##########
@@ -135,26 +116,3 @@ jobs:
       - name: Cleanup text-embeddings
         run: |
           uds zarf package remove text-embeddings -l=trace --confirm
-
-      ##########
-      # whisper
-      ##########
-      - name: Deploy whisper
-        run: |
-          make build-whisper LOCAL_VERSION=e2e-test
-          docker image prune -af
-          uds zarf package deploy packages/whisper/zarf-package-whisper-amd64-e2e-test.tar.zst -l=trace --confirm
-          rm packages/whisper/zarf-package-whisper-amd64-e2e-test.tar.zst
-
-      - name: Test whisper
-        run: |
-          python -m pytest ./tests/e2e/test_whisper.py -v
-
-      - name: Cleanup whisper
-        run: |
-          uds zarf package remove whisper -l=trace --confirm
-
-      # This cleanup may need to be moved/removed when other packages depend on Supabase
-      - name: Cleanup Supabase
-        run: |
-          uds zarf package remove supabase -l=trace --confirm
diff --git a/.github/workflows/e2e-vllm.yaml b/.github/workflows/e2e-vllm.yaml
new file mode 100644
index 0000000000..a72b6dc05a
--- /dev/null
+++ b/.github/workflows/e2e-vllm.yaml
@@ -0,0 +1,79 @@
+# End-to-end workflow that only verifies the vLLM package can be built (deploying/testing vLLM requires a GPU)
+
+name: e2e-vllm
+on:
+  pull_request:
+    types:
+      - ready_for_review
+      - review_requested
+      - synchronize
+      - milestoned
+    paths:
+      # Catch-all
+      - "**"
+
+      # Ignore updates to the .github directory, unless it's this current file
+      - "!.github/**"
+      - ".github/workflows/e2e-vllm.yaml"
+
+      # Ignore docs and website things
+      - "!**.md"
+      - "!docs/**"
+      - "!adr/**"
+      - "!website/**"
+      - "!netlify.toml"
+
+      # Ignore updates to generic github metadata files
+      - "!CODEOWNERS"
+      - "!.gitignore"
+      - "!LICENSE"
+
+      # Ignore local development files
+      - "!.pre-commit-config.yaml"
+
+      # Ignore non e2e tests
+      - "!tests/pytest/**"
+
+      # Ignore LFAI-UI things (for now?)
+      - "!src/leapfrogai_ui/**"
+      - "!packages/ui/**"
+
+      # Ignore changes to the repeater model
+      - "!packages/repeater/**"
+
+
+concurrency:
+  group: e2e-vllm-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  e2e_vllm:
+    runs-on: ai-ubuntu-big-boy-8-core
+    if: ${{ !github.event.pull_request.draft }}
+
+    steps:
+      - name: Checkout Repo
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+      - name: Setup Python
+        uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c #v5.0.0
+        with:
+          python-version-file: 'pyproject.toml'
+
+      - name: Install Python Deps
+        run: python -m pip install "."
+
+      - name: Setup UDS Environment
+        uses: defenseunicorns/uds-common/.github/actions/setup@05f42bb3117b66ebef8c72ae050b34bce19385f5
+        with:
+          username: ${{ secrets.IRON_BANK_ROBOT_USERNAME }}
+          password: ${{ secrets.IRON_BANK_ROBOT_PASSWORD }}
+
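+      # NOTE: Unlike the other split e2e workflows, this one never creates a UDS cluster or
+      #     : deploys Supabase/the API; it stops after the package build below.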
+      ##########
+      # vLLM
+      # NOTE: We are not deploying and testing vLLM in this workflow because it requires a GPU
+      #     : This workflow simply verifies that the vLLM package can be built
+      ##########
+      - name: Build vLLM
+        run: |
+          make build-vllm LOCAL_VERSION=e2e-test
diff --git a/.github/workflows/e2e-whisper.yaml b/.github/workflows/e2e-whisper.yaml
new file mode 100644
index 0000000000..68ec5c31c4
--- /dev/null
+++ b/.github/workflows/e2e-whisper.yaml
@@ -0,0 +1,114 @@
+# End-to-end testing that deploys Supabase and the API, and deploys/tests whisper
+
+name: e2e-whisper
+on:
+  pull_request:
+    types:
+      - ready_for_review
+      - review_requested
+      - synchronize
+      - milestoned
+    paths:
+      # Catch-all
+      - "**"
+
+      # Ignore updates to the .github directory, unless it's this current file
+      - "!.github/**"
+      - ".github/workflows/e2e-whisper.yaml"
+
+      # Ignore docs and website things
+      - "!**.md"
+      - "!docs/**"
+      - "!adr/**"
+      - "!website/**"
+      - "!netlify.toml"
+
+      # Ignore updates to generic github metadata files
+      - "!CODEOWNERS"
+      - "!.gitignore"
+      - "!LICENSE"
+
+      # Ignore local development files
+      - "!.pre-commit-config.yaml"
+
+      # Ignore non e2e tests
+      - "!tests/pytest/**"
+
+      # Ignore LFAI-UI things (for now?)
+      - "!src/leapfrogai_ui/**"
+      - "!packages/ui/**"
+
+      # Ignore changes to the repeater model
+      - "!packages/repeater/**"
+
+
+concurrency:
+  group: e2e-whisper-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  e2e_whisper:
+    runs-on: ai-ubuntu-big-boy-8-core
+    if: ${{ !github.event.pull_request.draft }}
+
+    steps:
+      - name: Checkout Repo
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+      - name: Setup Python
+        uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c #v5.0.0
+        with:
+          python-version-file: 'pyproject.toml'
+
+      - name: Install Python Deps
+        run: python -m pip install "."
+
+      - name: Setup UDS Environment
+        uses: defenseunicorns/uds-common/.github/actions/setup@05f42bb3117b66ebef8c72ae050b34bce19385f5
+        with:
+          username: ${{ secrets.IRON_BANK_ROBOT_USERNAME }}
+          password: ${{ secrets.IRON_BANK_ROBOT_PASSWORD }}
+
+      - name: Create UDS Cluster
+        run: |
+          uds deploy k3d-core-slim-dev:0.22.2 --confirm
+
+      ##########
+      # Supabase
+      ##########
+      - name: Deploy Supabase
+        run: |
+          make build-supabase LOCAL_VERSION=e2e-test
+          docker image prune -af
+          uds zarf tools kubectl create namespace leapfrogai
+          uds zarf package deploy packages/supabase/zarf-package-supabase-amd64-e2e-test.tar.zst --confirm
+          rm packages/supabase/zarf-package-supabase-amd64-e2e-test.tar.zst
+
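+      # NOTE: The Supabase anon key is read from the supabase-bootstrap-jwt secret, base64-decoded,
+      #     : and exported via GITHUB_ENV, presumably so the whisper e2e test can authenticate to the API.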
+      - name: Set environment variable
+        id: set-env-var
+        run: |
+          echo "ANON_KEY=$(uds zarf tools kubectl get secret supabase-bootstrap-jwt -n leapfrogai -o jsonpath='{.data.anon-key}' | base64 -d)" >> "$GITHUB_ENV"
+
+      ##########
+      # API
+      ##########
+      - name: Deploy LFAI-API
+        run: |
+          make build-api LOCAL_VERSION=e2e-test
+          docker image prune -af
+          uds zarf package deploy packages/api/zarf-package-leapfrogai-api-amd64-e2e-test.tar.zst --confirm
+          rm packages/api/zarf-package-leapfrogai-api-amd64-e2e-test.tar.zst
+
+      ##########
+      # whisper
+      ##########
+      - name: Deploy whisper
+        run: |
+          make build-whisper LOCAL_VERSION=e2e-test
+          docker image prune -af
+          uds zarf package deploy packages/whisper/zarf-package-whisper-amd64-e2e-test.tar.zst -l=trace --confirm
+          rm packages/whisper/zarf-package-whisper-amd64-e2e-test.tar.zst
+
+      - name: Test whisper
+        run: |
+          python -m pytest ./tests/e2e/test_whisper.py -v
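+
+      # NOTE: No `uds zarf package remove` cleanup steps are carried over from the combined
+      #     : workflow; this assumes the k3d cluster is discarded along with the CI runner.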