-
Notifications
You must be signed in to change notification settings - Fork 35
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Fix scrape jobs so that there are no duplicate job names. (#292)
* Fix scrape jobs so that there are no duplicate job names. This is because duplicate job names cause errors in the prometheus config * formatting * only use multiple jobs for the tests that require it * Better wording Co-authored-by: Leon <[email protected]> * Bad comment Co-authored-by: Leon <[email protected]> * shorten comment for linter * make comments more clear * Type hint Co-authored-by: Leon <[email protected]> * ensure function does not modify argument * must be trusted in order to get volume size * use upstream-source Co-authored-by: Leon <[email protected]>
- Loading branch information
Showing
6 changed files
with
184 additions
and
7 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
#!/usr/bin/env python3 | ||
# Copyright 2022 Canonical Ltd. | ||
# See LICENSE file for licensing details. | ||
|
||
import asyncio | ||
import json | ||
|
||
from helpers import get_prometheus_active_targets, oci_image | ||
from pytest_operator.plugin import OpsTest | ||
|
||
# Name under which the Prometheus charm is deployed in the test model.
prometheus_app_name = "prometheus"
# OCI image resource for the Prometheus charm, resolved from its metadata.yaml.
prometheus_resources = {"prometheus-image": oci_image("./metadata.yaml", "prometheus-image")}
# Name under which the scrape-job-providing tester charm is deployed.
tester_app_name = "tester"
# OCI image resource for the tester charm, resolved from its own metadata.yaml.
tester_resources = {
    "prometheus-tester-image": oci_image(
        "./tests/integration/prometheus-tester/metadata.yaml",
        "prometheus-tester-image",
    )
}
|
||
|
||
async def test_multiple_scrape_jobs_in_constructor(
    ops_test: OpsTest, prometheus_charm, prometheus_tester_charm
):
    """Test that job names are properly deduped when in the same consumer unit."""
    # Three scrape jobs, of which the first two are exact duplicates: after
    # deduplication only two distinct jobs should survive.
    duplicated_job = {
        "scrape_interval": "10s",
        "static_configs": [{"targets": ["*:8000"]}],
    }
    jobs = [
        duplicated_job,
        dict(duplicated_job),
        {
            "scrape_interval": "10s",
            "static_configs": [{"targets": ["*:8001"]}],
        },
    ]

    deploy_prometheus = ops_test.model.deploy(
        prometheus_charm,
        resources=prometheus_resources,
        application_name=prometheus_app_name,
        # NOTE(review): trust appears to be required for the charm's volume-size
        # lookup — confirm against the charm code.
        trust=True,
    )
    deploy_tester = ops_test.model.deploy(
        prometheus_tester_charm,
        resources=tester_resources,
        application_name=tester_app_name,
        config={"scrape_jobs": json.dumps(jobs)},
    )
    await asyncio.gather(deploy_prometheus, deploy_tester)

    await ops_test.model.add_relation(prometheus_app_name, tester_app_name)
    await ops_test.model.wait_for_idle(status="active")

    targets = await get_prometheus_active_targets(ops_test, prometheus_app_name)
    # Two unique jobs above, plus one additional job for self-scraping.
    assert len(targets) == 3
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,65 @@ | ||
# Copyright 2022 Canonical Ltd. | ||
# See LICENSE file for licensing details. | ||
|
||
import copy | ||
import unittest | ||
|
||
import deepdiff | ||
from charms.prometheus_k8s.v0.prometheus_scrape import _dedupe_job_names | ||
|
||
|
||
class TestFunctions(unittest.TestCase):
    def test_dedupe_job_names(self):
        """Check that ``_dedupe_job_names`` yields unique job names.

        Exact duplicate jobs (same name *and* same config) must collapse into a
        single job; jobs that merely share a name but differ in config must each
        receive a unique, hash-suffixed name. The input list must not be mutated.
        """
        jobs = [
            {
                "job_name": "job0",
                "static_configs": [{"targets": ["localhost:9090"]}],
                "scrape_interval": "5s",
            },
            {
                # Exact duplicate of the job above — should be collapsed.
                "job_name": "job0",
                "static_configs": [{"targets": ["localhost:9090"]}],
                "scrape_interval": "5s",
            },
            {
                "job_name": "job1",
                "static_configs": [{"targets": ["localhost:9090"]}],
                "scrape_interval": "5s",
            },
            {
                # Same name as job0 but different interval — must be renamed.
                "job_name": "job0",
                "static_configs": [{"targets": ["localhost:9090"]}],
                "scrape_interval": "10s",
            },
            {
                # Same name as job0 but different target — must be renamed.
                "job_name": "job0",
                "static_configs": [{"targets": ["localhost:9091"]}],
                "scrape_interval": "5s",
            },
        ]
        jobs_original = copy.deepcopy(jobs)
        expected = [
            {
                "job_name": "job0_6f9f1c305506707b952aef3885fa099fe36158f6359b8a06634068270645aefd",
                "scrape_interval": "5s",
                "static_configs": [{"targets": ["localhost:9090"]}],
            },
            {
                "job_name": "job0_c651cf3a8cd1b85abc0cf7620e058b87ef43e2296d1520328ce5a796e9b20993",
                "scrape_interval": "10s",
                "static_configs": [{"targets": ["localhost:9090"]}],
            },
            {
                "job_name": "job0_546b5bbb56e719d894b0a557975e0926ed093ea547c87051595d953122d2a7d6",
                "scrape_interval": "5s",
                "static_configs": [{"targets": ["localhost:9091"]}],
            },
            {
                "job_name": "job1",
                "scrape_interval": "5s",
                "static_configs": [{"targets": ["localhost:9090"]}],
            },
        ]
        # DeepDiff returns an empty diff iff the structures are equal. Asserting
        # equality with {} (rather than assertTrue(len(...) == 0)) produces a
        # readable diff in the failure message instead of "False is not true".
        self.assertEqual(deepdiff.DeepDiff(_dedupe_job_names(jobs), expected), {})
        # Make sure the function does not modify its argument.
        self.assertEqual(jobs, jobs_original)