e2e-llama-cpp-python.yaml
# End-to-end testing that deploys Supabase and the API, then builds, deploys, and tests llama-cpp-python
name: e2e-llama-cpp-python
on:
  pull_request:
    types:
      - opened # default trigger
      - reopened # default trigger
      - synchronize # default trigger
      - ready_for_review # don't run on draft PRs
      - milestoned # allows us to trigger on bot PRs
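    # Note: path filters are evaluated in order, so the "**" catch-all below is
    # narrowed by the "!" exclusions, and this workflow's own file is re-included
    # afterwards so that edits to it still trigger the run.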
    paths:
      # Catch-all
      - "**"
      # Ignore updates to the .github directory, unless it's this current file
      - "!.github/**"
      - ".github/workflows/e2e-llama-cpp-python.yaml"
      - ".github/actions/uds-cluster/action.yaml"
      # Ignore docs and website things
      - "!**.md"
      - "!docs/**"
      - "!adr/**"
      - "!website/**"
      - "!netlify.toml"
      # Ignore updates to generic github metadata files
      - "!CODEOWNERS"
      - "!.gitignore"
      - "!LICENSE"
      # Ignore local development files
      - "!.pre-commit-config.yaml"
      - "!tasks.yaml"
      # Ignore non-e2e test changes
      - "!tests/pytest/**"
      # Ignore LFAI-UI source code changes
      - "!src/leapfrogai_ui/**"
      # Ignore changes to unrelated packages
      - "!packages/k3d-gpu/**"
      - "!packages/repeater/**"
      - "!packages/text-embeddings/**"
      - "!packages/ui/**"
      - "!packages/vllm/**"
      - "!packages/whisper/**"
concurrency:
  group: e2e-llama-cpp-python-${{ github.ref }}
  cancel-in-progress: true
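# Single job: stand up a UDS cluster, deploy the LeapfrogAI core (API + Supabase),
# then build, deploy, and test the llama-cpp-python package.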
jobs:
  e2e_llama:
    runs-on: ai-ubuntu-big-boy-8-core
    if: ${{ !github.event.pull_request.draft }}
    permissions:
      contents: read
      packages: read
      id-token: write # This is needed for OIDC federation.
    steps:
      - name: Checkout Repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
      - name: Setup Python
        uses: ./.github/actions/python
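      # Stands up the UDS cluster via the shared action; the Registry1 and Chainguard
      # credentials below are presumably what authenticate the required image pulls.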
      - name: Setup UDS Cluster
        uses: ./.github/actions/uds-cluster
        with:
          registry1Username: ${{ secrets.IRON_BANK_ROBOT_USERNAME }}
          registry1Password: ${{ secrets.IRON_BANK_ROBOT_PASSWORD }}
          ghToken: ${{ secrets.GITHUB_TOKEN }}
          chainguardIdentity: ${{ secrets.CHAINGUARD_IDENTITY }}
      - name: Setup API and Supabase
        uses: ./.github/actions/lfai-core
      ##########
      # llama
      ##########
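      # Build the llama-cpp-python Zarf package locally with an e2e-test tag, remove
      # unused Docker images to free runner disk, deploy the package with trace-level
      # logging, then delete the tarball to reclaim space.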
      - name: Deploy llama-cpp-python
        run: |
          make build-llama-cpp-python LOCAL_VERSION=e2e-test
          docker image prune -af
          uds zarf package deploy packages/llama-cpp-python/zarf-package-llama-cpp-python-amd64-e2e-test.tar.zst -l=trace --confirm
          rm packages/llama-cpp-python/zarf-package-llama-cpp-python-amd64-e2e-test.tar.zst
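      # Run the e2e generation tests against the deployed backend; LEAPFROGAI_MODEL
      # selects which backend the test suite targets.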
      - name: Test llama-cpp-python
        env:
          LEAPFROGAI_MODEL: llama-cpp-python
        run: |
          python -m pytest ./tests/e2e/test_llm_generation.py -vv