Skip to content

Commit

Permalink
Stop setting unit IP on every possible event and choose the right one instead (#286)
Browse files Browse the repository at this point in the history

* Stop setting unit IP on every possible event and choose instead

* Review comments

* Blackbox testing instead
  • Loading branch information
rbarry82 authored May 20, 2022
1 parent a3d80a0 commit 010761f
Show file tree
Hide file tree
Showing 2 changed files with 104 additions and 19 deletions.
38 changes: 27 additions & 11 deletions lib/charms/prometheus_k8s/v0/prometheus_scrape.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ def _on_scrape_targets_changed(self, event):

import yaml
from ops.charm import CharmBase, RelationRole
from ops.framework import EventBase, EventSource, Object, ObjectEvents
from ops.framework import BoundEvent, EventBase, EventSource, Object, ObjectEvents

# The unique Charmhub library identifier, never change it
from ops.model import ModelError
Expand Down Expand Up @@ -1432,6 +1432,7 @@ def __init__(
relation_name: str = DEFAULT_RELATION_NAME,
jobs=None,
alert_rules_path: str = DEFAULT_ALERT_RULES_RELATIVE_PATH,
refresh_event: Optional[Union[BoundEvent, List[BoundEvent]]] = None,
):
"""Construct a metrics provider for a Prometheus charm.
Expand Down Expand Up @@ -1525,6 +1526,8 @@ def __init__(
files. Defaults to "./prometheus_alert_rules",
resolved relative to the directory hosting the charm entry file.
The alert rules are automatically updated on charm upgrade.
refresh_event: an optional bound event or list of bound events which
will be observed to re-set scrape job data (IP address and others)
Raises:
RelationNotFoundError: If there is no relation in the charm's metadata.yaml
Expand Down Expand Up @@ -1563,17 +1566,30 @@ def __init__(
self.framework.observe(events.relation_joined, self._set_scrape_job_spec)
self.framework.observe(events.relation_changed, self._set_scrape_job_spec)

# dirty fix: set the ip address when the containers start, as a workaround
# for not being able to lookup the pod ip
for container_name in charm.unit.containers:
self.framework.observe(
charm.on[container_name].pebble_ready,
self._set_unit_ip,
)
if not refresh_event:
if len(self._charm.meta.containers) == 1:
if "kubernetes" in self._charm.meta.series:
# This is a podspec charm
refresh_event = [self._charm.on.update_status]
else:
# This is a sidecar/pebble charm
container = list(self._charm.meta.containers.values())[0]
refresh_event = [self._charm.on[container.name.replace("-", "_")].pebble_ready]
else:
logger.warning(
"%d containers are present in metadata.yaml and "
"refresh_event was not specified. Defaulting to update_status. "
"Metrics IP may not be set in a timely fashion.",
len(self._charm.meta.containers),
)
refresh_event = [self._charm.on.update_status]

else:
if not isinstance(refresh_event, list):
refresh_event = [refresh_event]

# podspec charms do not have a pebble ready event so unit ips need to be set these events
self.framework.observe(self._charm.on.update_status, self._set_unit_ip)
self.framework.observe(self._charm.on.config_changed, self._set_unit_ip)
for ev in refresh_event:
self.framework.observe(ev, self._set_unit_ip)

self.framework.observe(self._charm.on.upgrade_charm, self._set_scrape_job_spec)

Expand Down
85 changes: 77 additions & 8 deletions tests/unit/test_endpoint_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
interface: prometheus_scrape
"""


JOBS: List[dict] = [
{
"global": {"scrape_interval": "1h"},
Expand Down Expand Up @@ -72,6 +73,20 @@ def __init__(self, *args, **kwargs):
)


class EndpointProviderCharmWithMultipleEvents(CharmBase):
    """Provider charm that wires MetricsEndpointProvider to several refresh events.

    Used to verify that an explicit ``refresh_event`` list overrides the
    library's default event-selection logic.
    """

    _stored = StoredState()

    def __init__(self, *args, **kwargs):
        super().__init__(*args)

        # Re-publish scrape job data on both pebble-ready and config-changed
        # instead of letting the library pick a single default event.
        refresh_events = [
            self.on.prometheus_tester_pebble_ready,
            self.on.config_changed,
        ]
        self.provider = MetricsEndpointProvider(
            self,
            jobs=JOBS,
            alert_rules_path="./tests/unit/prometheus_alert_rules",
            refresh_event=refresh_events,
        )


class TestEndpointProvider(unittest.TestCase):
def setUp(self):
self.harness = Harness(EndpointProviderCharm, meta=PROVIDER_META)
Expand Down Expand Up @@ -144,18 +159,72 @@ def test_provider_sets_scrape_metadata(self, _):
self.assertIn("model_uuid", scrape_metadata)
self.assertIn("application", scrape_metadata)

@patch(
    "charms.prometheus_k8s.v0.prometheus_scrape.MetricsEndpointProvider._set_unit_ip",
    autospec=True,
)
def test_provider_selects_correct_refresh_event_for_sidecar(self, mock_set_unit_ip):
    """A single-container sidecar charm defaults to that container's pebble_ready event."""
    self.harness.add_relation(RELATION_NAME, "provider")

    # pebble_ready for the sole container is the auto-selected refresh event,
    # so firing it must publish the unit IP exactly once.
    self.harness.container_pebble_ready("prometheus-tester")
    self.assertEqual(mock_set_unit_ip.call_count, 1)

@patch(
    "charms.prometheus_k8s.v0.prometheus_scrape.MetricsEndpointProvider._set_unit_ip",
    autospec=True,
)
def test_provider_selects_correct_refresh_event_for_podspec(self, mock_set_unit_ip):
    """A podspec charm (kubernetes series) defaults to update_status as its refresh event."""
    harness = Harness(
        EndpointProviderCharm,
        # `series: kubernetes` with a single container marks this charm as
        # podspec, so the provider falls back to observing update_status.
        meta=f"""
        name: provider-tester
        containers:
          prometheus-tester:
        provides:
          {RELATION_NAME}:
            interface: prometheus_scrape
        series:
          - kubernetes
        """,
    )
    harness.begin()
    # update_status is the chosen refresh event; emitting it must set the unit IP once.
    harness.charm.on.update_status.emit()
    self.assertEqual(mock_set_unit_ip.call_count, 1)

@patch(
    "charms.prometheus_k8s.v0.prometheus_scrape.MetricsEndpointProvider._set_unit_ip",
    autospec=True,
)
def test_provider_can_refresh_on_multiple_events(self, mock_set_unit_ip):
    """Every event passed via refresh_event triggers _set_unit_ip, not just the first."""
    harness = Harness(
        EndpointProviderCharmWithMultipleEvents,
        # The charm under test passes refresh_event=[pebble_ready, config_changed]
        # explicitly, so the library's default event selection is bypassed.
        meta=f"""
        name: provider-tester
        containers:
          prometheus-tester:
        provides:
          {RELATION_NAME}:
            interface: prometheus_scrape
        series:
          - kubernetes
        """,
    )
    harness.begin()
    harness.add_relation(RELATION_NAME, "provider")

    # First configured refresh event.
    harness.charm.on.config_changed.emit()
    self.assertEqual(mock_set_unit_ip.call_count, 1)

    # Second configured refresh event.
    harness.container_pebble_ready("prometheus-tester")
    self.assertEqual(mock_set_unit_ip.call_count, 2)

@patch_network_get(private_address="192.0.8.2")
def test_provider_unit_sets_bind_address_on_pebble_ready(self, *unused):
    """pebble_ready publishes the unit's bind address into the relation data bag."""
    rel_id = self.harness.add_relation(RELATION_NAME, "provider")
    self.harness.container_pebble_ready("prometheus-tester")
    data = self.harness.get_relation_data(rel_id, self.harness.charm.unit.name)
    # The patched network-get address must be what ends up on the wire.
    self.assertIn("prometheus_scrape_unit_address", data)
    self.assertEqual(data["prometheus_scrape_unit_address"], "192.0.8.2")
Expand Down

0 comments on commit 010761f

Please sign in to comment.