# integration_test_charm.yaml
# Copyright 2023 Canonical Ltd.
# See LICENSE file for licensing details.
# Usage documentation: integration_test_charm.md
on:
workflow_call:
inputs:
artifact-prefix:
description: |
Prefix for charm package GitHub artifact(s)
Use canonical/data-platform-workflows build_charm.yaml to build the charm(s)
required: true
type: string
cloud:
# Keep description synchronized with "Validate input" step
description: |
Juju cloud
Must be one of: "lxd", "microk8s"
https://juju.is/docs/olm/cloud
required: true
type: string
microk8s-snap-channel:
description: |
microk8s snap channel
Required if `cloud` input is "microk8s"
required: false
type: string
lxd-snap-channel:
description: LXD snap channel
default: latest/stable
type: string
juju-agent-version:
description: Juju agent version
required: false
type: string
juju-snap-channel:
description: |
Juju CLI snap channel
Required if `juju-agent-version` input is not passed
required: false
type: string
libjuju-version-constraint:
description: |
Poetry-compatible python-libjuju version constraint (e.g. "^1.2.3", ">= 1.2, < 1.5")
https://python-poetry.org/docs/dependency-specification/#version-constraints
Overrides python-libjuju version constraint in Poetry `integration` dependency group
Each version of python-libjuju is only compatible with one major Juju version
Recommendation: With Poetry, pin python-libjuju for the latest major Juju version.
To test older major Juju versions, override the version constraint with this input.
required: false
type: string
_beta_allure_report:
description: |
(BETA) Enable Allure Report
Subject to breaking changes without major version bump.
Not currently a public interface—only for testing
If this workflow is called with a matrix, this value can only be `true` for one
combination in the matrix
default: false
type: boolean
secrets:
integration-test:
description: |
Secrets needed in integration tests
Passed to tests with `SECRETS_FROM_GITHUB` environment variable
Use a string representation of a Python dict[str, str] built from multiple GitHub secrets
Do NOT put the string into a single GitHub secret—build the string from multiple GitHub secrets so that GitHub is more likely to redact the secrets in GitHub Actions logs.
Python code to verify the string format:
```
import ast
secrets = ast.literal_eval("")  # replace "" with the string to verify
assert isinstance(secrets, dict)
for key, value in secrets.items():
assert isinstance(key, str) and isinstance(value, str)
```
required: false
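# Example of a caller workflow job wiring the inputs and secret above together
# (a minimal sketch, not part of this workflow's interface: the job names, the
# build_charm.yaml output name, the version tag, and the secret names are
# illustrative assumptions):
#
#   integration-test:
#     needs:
#       - build
#     uses: canonical/data-platform-workflows/.github/workflows/integration_test_charm.yaml@v1.2.3
#     with:
#       artifact-prefix: ${{ needs.build.outputs.artifact-prefix }}
#       cloud: microk8s
#       microk8s-snap-channel: 1.28-strict/stable
#       juju-agent-version: 3.1.6
#     secrets:
#       # Built from multiple GitHub secrets so they stay redacted in logs
#       integration-test: |
#         {
#           "AWS_ACCESS_KEY": "${{ secrets.AWS_ACCESS_KEY }}",
#           "AWS_SECRET_KEY": "${{ secrets.AWS_SECRET_KEY }}",
#         }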
jobs:
get-workflow-version:
name: Get workflow version
uses: ./.github/workflows/_get_workflow_version.yaml
with:
repository-name: canonical/data-platform-workflows
file-name: integration_test_charm.yaml
collect-integration-tests:
# In nested CI calls (e.g. release.yaml calls ci.yaml calls this workflow) the job name in
# ci.yaml will not show up on the GitHub Actions sidebar.
# If this workflow is called with a matrix (e.g. to test multiple juju versions), the ci.yaml
# job name containing the Juju version will be lost.
# So, we add the Juju version to one of the first jobs in this workflow.
# (In the UI, when this workflow is called with a matrix, GitHub will separate each matrix
# combination and preserve job ordering within a matrix combination.)
name: ${{ inputs.juju-agent-version || inputs.juju-snap-channel }} | Collect integration test groups
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install tox & poetry
run: |
pipx install tox
pipx install poetry
- name: Select test stability level
id: select-test-stability
shell: python
run: |
import os
if "${{ github.event_name }}" == "schedule":
print("Running unstable and stable tests")
output = "mark_expression="
else:
print("Skipping unstable tests")
output = "mark_expression=not unstable"
with open(os.environ["GITHUB_OUTPUT"], "a") as file:
file.write(output)
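# Note: the mark expression computed above is passed to pytest via `-m` in the
# next step; an empty expression selects all tests, while "not unstable"
# deselects tests carrying an `unstable` marker (e.g. `@pytest.mark.unstable`,
# assuming the charm repository registers that marker).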
- name: Collect test groups
id: collect-groups
run: tox run -e integration -- tests/integration -m '${{ steps.select-test-stability.outputs.mark_expression }}' --collect-groups
outputs:
groups: ${{ steps.collect-groups.outputs.groups }}
integration-test:
strategy:
fail-fast: false
matrix:
groups: ${{ fromJSON(needs.collect-integration-tests.outputs.groups) }}
name: ${{ matrix.groups.job_name }}
needs:
- get-workflow-version
- collect-integration-tests
runs-on: ${{ matrix.groups.runner || 'ubuntu-latest' }}
timeout-minutes: 120
steps:
- name: Free up disk space
if: ${{ !matrix.groups.self_hosted }}
run: |
printf '\nDisk usage before cleanup\n'
df --human-readable
# Based on https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
rm -r /usr/share/dotnet
rm -r /opt/hostedtoolcache/
printf '\nDisk usage after cleanup\n'
df --human-readable
- name: (self-hosted) Disk usage
if: ${{ matrix.groups.self_hosted }}
run: df --human-readable
- name: (self-hosted) Install pipx
if: ${{ matrix.groups.self_hosted }}
run: |
sudo apt-get update
sudo apt-get install python3-pip python3-venv -y
python3 -m pip install --user pipx
python3 -m pipx ensurepath
- name: Install CLI
run: pipx install git+https://github.com/canonical/data-platform-workflows@'${{ needs.get-workflow-version.outputs.version }}'#subdirectory=python/cli
- name: Redact secrets from log
run: redact-secrets
env:
SECRETS: ${{ secrets.integration-test }}
- name: Parse cloud input
id: parse-cloud
shell: python
# Keep synchronized with inputs.cloud description
run: |
import json
import os
CLOUD = "${{ inputs.cloud }}"
self_hosted = json.loads("${{ matrix.groups.self_hosted }}")
assert isinstance(self_hosted, bool)
if CLOUD == "lxd":
group = "lxd"
elif CLOUD == "microk8s":
if self_hosted:
raise ValueError("microk8s not supported on self-hosted runners")
SNAP_CHANNEL = "${{ inputs.microk8s-snap-channel }}"
assert (
SNAP_CHANNEL != ""
), '`microk8s-snap-channel` input required if `cloud` is "microk8s"'
assert "strict" in SNAP_CHANNEL.lower(), "Only strict microk8s snap supported"
group = "snap_microk8s"
else:
raise ValueError(f"`cloud` input not recognized: {CLOUD}")
output = f"group={group}"
print(output)
with open(os.environ["GITHUB_OUTPUT"], "a") as file:
file.write(output)
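# The "group" output is the Unix group that grants access to the selected
# cloud's snap ("lxd" for LXD, "snap_microk8s" for strict microk8s); later
# steps wrap cloud and test commands in `sg <group> -c ...` because `newgrp`
# does not work in GitHub Actions.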
- name: Parse Juju agent version & snap channel
id: parse-versions
shell: python
run: |
import os
AGENT_VERSION = "${{ inputs.juju-agent-version }}"
snap_channel = "${{ inputs.juju-snap-channel }}"
if not snap_channel:
if AGENT_VERSION:
major, minor, patch = AGENT_VERSION.split(".")
snap_channel = f"{major}.{minor}/stable"
else:
raise Exception(
"`juju-snap-channel` required if `juju-agent-version` is not passed"
)
output = "agent_bootstrap_option="
if AGENT_VERSION:
output += f"--agent-version={AGENT_VERSION}"
output += f"\nsnap_channel={snap_channel}"
# GitHub artifact name cannot contain "/"
output += f'\nsnap_channel_for_artifact={snap_channel.replace("/", "-")}'
print(output)
with open(os.environ["GITHUB_OUTPUT"], "a") as file:
file.write(output)
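# Example of the outputs derived above: juju-agent-version "3.1.6" yields
# agent_bootstrap_option "--agent-version=3.1.6", snap_channel "3.1/stable"
# and snap_channel_for_artifact "3.1-stable".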
- name: Checkout
uses: actions/checkout@v4
- name: Set up environment
run: |
# `--classic` applies to juju 2 snap; ignored for juju 3 snap
sudo snap install juju --classic --channel='${{ steps.parse-versions.outputs.snap_channel }}'
- name: Set up lxd
if: ${{ inputs.cloud == 'lxd' }}
run: |
sudo snap refresh lxd --channel='${{ inputs.lxd-snap-channel }}'
sudo adduser "$USER" '${{ steps.parse-cloud.outputs.group }}'
# `newgrp` does not work in GitHub Actions; use `sg` instead
sg '${{ steps.parse-cloud.outputs.group }}' -c "lxd waitready"
sg '${{ steps.parse-cloud.outputs.group }}' -c "lxd init --auto"
sg '${{ steps.parse-cloud.outputs.group }}' -c "lxc network set lxdbr0 ipv6.address none"
sudo iptables -F FORWARD
sudo iptables -P FORWARD ACCEPT
- name: Set up microk8s
if: ${{ inputs.cloud == 'microk8s' }}
run: |
sudo apt-get update
sudo apt-get install retry -y
sudo snap install microk8s --channel='${{ inputs.microk8s-snap-channel }}'
sudo adduser "$USER" '${{ steps.parse-cloud.outputs.group }}'
# `newgrp` does not work in GitHub Actions; use `sg` instead
sg '${{ steps.parse-cloud.outputs.group }}' -c "microk8s status --wait-ready"
sg '${{ steps.parse-cloud.outputs.group }}' -c "retry --times 3 --delay 5 -- sudo microk8s enable dns"
sg '${{ steps.parse-cloud.outputs.group }}' -c "microk8s status --wait-ready"
sg '${{ steps.parse-cloud.outputs.group }}' -c "microk8s.kubectl rollout status --namespace kube-system --watch --timeout=5m deployments/coredns"
sg '${{ steps.parse-cloud.outputs.group }}' -c "retry --times 3 --delay 5 -- sudo microk8s enable hostpath-storage"
sg '${{ steps.parse-cloud.outputs.group }}' -c "microk8s.kubectl rollout status --namespace kube-system --watch --timeout=5m deployments/hostpath-provisioner"
mkdir ~/.kube
# Used by lightkube and kubernetes (Python package)
sg '${{ steps.parse-cloud.outputs.group }}' -c "microk8s config > ~/.kube/config"
- run: snap list
- name: Set up environment
run: |
mkdir -p ~/.local/share/juju # Workaround for juju 3 strict snap
sg '${{ steps.parse-cloud.outputs.group }}' -c "juju bootstrap '${{ inputs.cloud }}' '${{ steps.parse-versions.outputs.agent_bootstrap_option }}'"
- name: (self-hosted) Set up Juju proxy config
if: ${{ matrix.groups.self_hosted }}
# `https-proxy` sets `HTTPS_PROXY` environment variable inside Juju machines
# (same for `http-proxy` -> `HTTP_PROXY` and `no-proxy` -> `NO_PROXY`)
# Self-hosted runners require proxy
run: juju model-defaults http-proxy="$HTTP_PROXY" https-proxy="$HTTPS_PROXY" no-proxy="$NO_PROXY"
- name: Set up environment
run: |
juju model-defaults logging-config='<root>=INFO; unit=DEBUG'
juju add-model test
pipx install tox
pipx install poetry
- name: Update python-libjuju version
if: ${{ inputs.libjuju-version-constraint }}
run: poetry add --lock --group integration juju@'${{ inputs.libjuju-version-constraint }}'
- name: Download packed charm(s)
uses: actions/download-artifact@v4
with:
pattern: ${{ inputs.artifact-prefix }}-*
merge-multiple: true
- name: Select test stability level
id: select-test-stability
shell: python
run: |
import os
if "${{ github.event_name }}" == "schedule":
print("Running unstable and stable tests")
output = "mark_expression="
else:
print("Skipping unstable tests")
output = "mark_expression=not unstable"
with open(os.environ["GITHUB_OUTPUT"], "a") as file:
file.write(output)
- name: (beta) Get Allure option
if: ${{ inputs._beta_allure_report }}
id: allure-option
# TODO future improvement: check if allure-pytest installed instead
shell: python
run: |
import os
output = "option=--alluredir=allure-results"
with open(os.environ["GITHUB_OUTPUT"], "a") as file:
file.write(output)
- name: Run integration tests
id: tests
run: sg '${{ steps.parse-cloud.outputs.group }}' -c "tox run -e integration -- '${{ matrix.groups.path_to_test_file }}' --group='${{ matrix.groups.group_id }}' -m '${{ steps.select-test-stability.outputs.mark_expression }}' --model test ${{ steps.allure-option.outputs.option }}"
env:
SECRETS_FROM_GITHUB: ${{ secrets.integration-test }}
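# When the `integration-test` secret is passed, tests can read the string back
# from the SECRETS_FROM_GITHUB environment variable and parse it with
# `ast.literal_eval` into a dict[str, str], mirroring the verification snippet
# in the secret's description above.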
- name: (beta) Upload Allure results
if: ${{ (success() || (failure() && steps.tests.outcome == 'failure')) && inputs._beta_allure_report && github.event_name == 'schedule' && github.run_attempt == '1' }}
uses: actions/upload-artifact@v4
with:
name: allure-results-integration-test-charm-${{ inputs.cloud }}-juju-${{ inputs.juju-agent-version || steps.parse-versions.outputs.snap_channel_for_artifact }}-${{ matrix.groups.artifact_group_id }}
path: allure-results/
if-no-files-found: error
- name: Select model
if: ${{ success() || (failure() && steps.tests.outcome == 'failure') }}
run: |
juju switch test
mkdir ~/logs/
- name: juju status
if: ${{ success() || (failure() && steps.tests.outcome == 'failure') }}
run: juju status --color --relations | tee ~/logs/juju-status.txt
- name: juju debug-log
if: ${{ success() || (failure() && steps.tests.outcome == 'failure') }}
run: juju debug-log --color --replay --no-tail | tee ~/logs/juju-debug-log.txt
- name: Upload logs
if: ${{ success() || (failure() && steps.tests.outcome == 'failure') }}
uses: actions/upload-artifact@v4
with:
name: logs-integration-test-charm-${{ inputs.cloud }}-juju-${{ inputs.juju-agent-version || steps.parse-versions.outputs.snap_channel_for_artifact }}-${{ matrix.groups.artifact_group_id }}
path: ~/logs/
if-no-files-found: error
- name: Disk usage
if: ${{ success() || (failure() && steps.tests.outcome == 'failure') }}
run: df --human-readable
outputs:
juju-snap-channel-for-artifact: ${{ steps.parse-versions.outputs.snap_channel_for_artifact }}
allure-report:
# TODO future improvement: use concurrency group for job
name: (beta) Publish Allure report
if: ${{ !cancelled() && inputs._beta_allure_report && github.event_name == 'schedule' && github.run_attempt == '1' }}
needs:
- integration-test
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Download Allure
# Following instructions from https://allurereport.org/docs/gettingstarted-installation/#install-via-the-system-package-manager-for-linux
run: gh release download --repo allure-framework/allure2 --pattern 'allure_*.deb'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Allure
run: |
sudo apt-get update
sudo apt-get install ./allure_*.deb -y
# For first run, manually create branch with no history
# (e.g.
# git checkout --orphan gh-pages-beta
# git rm -rf .
# touch .nojekyll
# git add .nojekyll
# git commit -m "Initial commit"
# git push origin gh-pages-beta
# )
- name: Checkout GitHub pages branch
uses: actions/checkout@v4
with:
ref: gh-pages-beta
path: repo/
- name: Download test results
uses: actions/download-artifact@v4
with:
path: allure-results/
pattern: allure-results-integration-test-charm-${{ inputs.cloud }}-juju-${{ inputs.juju-agent-version || needs.integration-test.outputs.juju-snap-channel-for-artifact }}-*
merge-multiple: true
- name: Load test report history
run: |
if [[ -d repo/_latest/history/ ]]
then
echo 'Loading history'
cp -r repo/_latest/history/ allure-results/
fi
- name: Create executor.json
shell: python
run: |
# Reverse engineered from https://github.com/simple-elf/allure-report-action/blob/eca283b643d577c69b8e4f048dd6cd8eb8457cfd/entrypoint.sh
import json
DATA = {
"name": "GitHub Actions",
"type": "github",
"buildOrder": ${{ github.run_number }}, # TODO future improvement: use run ID
"buildName": "Run ${{ github.run_id }}",
"buildUrl": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
"reportUrl": "../${{ github.run_number }}/",
}
with open("allure-results/executor.json", "w") as file:
json.dump(DATA, file)
- name: Generate Allure report
run: allure generate
- name: Create index.html
shell: python
run: |
DATA = f"""<!DOCTYPE html>
<meta charset="utf-8">
<meta http-equiv="cache-control" content="no-cache">
<meta http-equiv="refresh" content="0; url=${{ github.run_number }}">
"""
with open("repo/index.html", "w") as file:
file.write(DATA)
- name: Update GitHub pages branch
working-directory: repo/
# TODO future improvement: commit message
run: |
mkdir '${{ github.run_number }}'
rm -f _latest
ln -s '${{ github.run_number }}' _latest
cp -r ../allure-report/. _latest/
git add .
git config user.name "GitHub Actions"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -m "Allure report #${{ github.run_number }}"
# Uses token set in checkout step
git push origin gh-pages-beta