Update charm libraries #4

Closed
wants to merge 1 commit into from
@@ -97,7 +97,7 @@ def _on_certificate_removed(self, event: CertificateRemovedEvent):
import logging
from typing import List

from jsonschema import exceptions, validate # type: ignore[import]
from jsonschema import exceptions, validate # type: ignore[import-untyped]
from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationChangedEvent
from ops.framework import EventBase, EventSource, Handle, Object

@@ -109,7 +109,7 @@ def _on_certificate_removed(self, event: CertificateRemovedEvent):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 4
LIBPATCH = 5

PYDEPS = ["jsonschema"]

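The only functional edits in this first hunk are the narrower type-ignore code (newer mypy versions report untyped third-party imports as `import-untyped` rather than the generic `import`) and the LIBPATCH bump. For context, a minimal sketch of how a charm library typically uses this import; the schema and helper below are illustrative, not the library's own:

```python
from jsonschema import exceptions, validate  # type: ignore[import-untyped]

# Hypothetical schema, for illustration only.
CERT_TRANSFER_SCHEMA = {
    "type": "object",
    "properties": {"certificate": {"type": "string"}},
    "required": ["certificate"],
}


def is_valid(databag: dict) -> bool:
    """Return True if the relation databag matches the illustrative schema."""
    try:
        validate(instance=databag, schema=CERT_TRANSFER_SCHEMA)
        return True
    except exceptions.ValidationError:
        return False
```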
62 changes: 43 additions & 19 deletions lib/charms/loki_k8s/v0/loki_push_api.py
@@ -12,9 +12,9 @@
implement the provider side of the `loki_push_api` relation interface. For instance, a Loki charm.
The provider side of the relation represents the server side, to which logs are being pushed.

- `LokiPushApiConsumer`: This object is meant to be used by any Charmed Operator that needs to
send log to Loki by implementing the consumer side of the `loki_push_api` relation interface.
For instance, a Promtail or Grafana agent charm which needs to send logs to Loki.
- `LokiPushApiConsumer`: Used to obtain the loki api endpoint. This is useful for configuring
applications such as pebble, or charmed operators of workloads such as grafana-agent or promtail,
that can communicate with loki directly.

- `LogProxyConsumer`: This object can be used by any Charmed Operator which needs to
send telemetry, such as logs, to Loki through a Log Proxy by implementing the consumer side of the
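The reworded docstring repositions `LokiPushApiConsumer` as the object a charm uses to discover Loki's push-API endpoints rather than to push logs itself. A minimal sketch of that pattern, assuming a charm whose metadata declares a `logging` relation (the relation name, logger usage, and what the charm does with the endpoints are illustrative):

```python
import logging

from charms.loki_k8s.v0.loki_push_api import LokiPushApiConsumer
from ops.charm import CharmBase
from ops.main import main

logger = logging.getLogger(__name__)


class MyWorkloadCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # Relation name assumed to be "logging" in metadata.yaml.
        self._loki = LokiPushApiConsumer(self, relation_name="logging")
        self.framework.observe(
            self._loki.on.loki_push_api_endpoint_joined, self._on_loki_endpoints_changed
        )

    def _on_loki_endpoints_changed(self, _event):
        # Each entry is expected to look like {"url": "http://.../loki/api/v1/push"};
        # a real charm would render these into its workload's config (e.g. grafana-agent).
        logger.info("Loki push endpoints: %s", self._loki.loki_endpoints)


if __name__ == "__main__":
    main(MyWorkloadCharm)
```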
@@ -456,7 +456,7 @@ def _alert_rules_error(self, event):
from urllib.error import HTTPError

import yaml
from charms.observability_libs.v0.juju_topology import JujuTopology
from cosl import JujuTopology
from ops.charm import (
CharmBase,
HookEvent,
@@ -480,7 +480,7 @@ def _alert_rules_error(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 22
LIBPATCH = 26

logger = logging.getLogger(__name__)

@@ -604,7 +604,9 @@ def _validate_relation_by_interface_and_direction(
actual_relation_interface = relation.interface_name
if actual_relation_interface != expected_relation_interface:
raise RelationInterfaceMismatchError(
relation_name, expected_relation_interface, actual_relation_interface
relation_name,
expected_relation_interface,
actual_relation_interface, # pyright: ignore
)

if expected_relation_role == RelationRole.provides:
@@ -866,20 +868,20 @@ def _from_dir(self, dir_path: Path, recursive: bool) -> List[dict]:

return alert_groups

def add_path(self, path: str, *, recursive: bool = False):
def add_path(self, path_str: str, *, recursive: bool = False):
"""Add rules from a dir path.

All rules from files are aggregated into a data structure representing a single rule file.
All group names are augmented with juju topology.

Args:
path: either a rules file or a dir of rules files.
path_str: either a rules file or a dir of rules files.
recursive: whether to read files recursively or not (no impact if `path` is a file).

Raises:
InvalidAlertRulePathError: if the provided path is invalid.
"""
path = Path(path) # type: Path
path = Path(path_str) # type: Path
if path.is_dir():
self.alert_groups.extend(self._from_dir(path, recursive))
elif path.is_file():
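The rename from `path` to `path_str` avoids rebinding a `str` parameter to a `Path`, which is what static checkers flag. A standalone illustration of the same pattern (file extension and exception are placeholders, not what `add_path` actually uses):

```python
from pathlib import Path
from typing import List


def collect_rule_files(path_str: str, *, recursive: bool = False) -> List[Path]:
    """Accept a plain string, then work with a separately named Path object."""
    path = Path(path_str)  # new name, so the str argument keeps its declared type
    if path.is_dir():
        pattern = "**/*.rules" if recursive else "*.rules"
        return sorted(path.glob(pattern))
    if path.is_file():
        return [path]
    raise ValueError(f"Invalid rules path: {path_str}")
```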
@@ -992,6 +994,8 @@ def __init__(self, handle, relation, relation_id, app=None, unit=None):

def snapshot(self) -> Dict:
"""Save event information."""
if not self.relation:
return {}
snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
if self.app:
snapshot["app_name"] = self.app.name
@@ -1052,7 +1056,7 @@ class LokiPushApiEvents(ObjectEvents):
class LokiPushApiProvider(Object):
"""A LokiPushApiProvider class."""

on = LokiPushApiEvents()
on = LokiPushApiEvents() # pyright: ignore

def __init__(
self,
@@ -1146,11 +1150,11 @@ def _on_logging_relation_changed(self, event: HookEvent):
event: a `CharmEvent` in response to which the consumer
charm must update its relation data.
"""
should_update = self._process_logging_relation_changed(event.relation)
should_update = self._process_logging_relation_changed(event.relation) # pyright: ignore
if should_update:
self.on.loki_push_api_alert_rules_changed.emit(
relation=event.relation,
relation_id=event.relation.id,
relation=event.relation, # pyright: ignore
relation_id=event.relation.id, # pyright: ignore
app=self._charm.app,
unit=self._charm.unit,
)
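Aside from the pyright pragmas, this hunk shows the provider emitting `loki_push_api_alert_rules_changed` with the relation, its id, and the charm's app and unit. A sketch of how a Loki-style charm might observe it; the relation name, port, and handler body are illustrative:

```python
import logging

from charms.loki_k8s.v0.loki_push_api import LokiPushApiProvider
from ops.charm import CharmBase

logger = logging.getLogger(__name__)


class LokiOperatorCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        self.loki_provider = LokiPushApiProvider(self, relation_name="logging", port=3100)
        self.framework.observe(
            self.loki_provider.on.loki_push_api_alert_rules_changed,
            self._on_alert_rules_changed,
        )

    def _on_alert_rules_changed(self, event):
        # The event's snapshot (shown above) carries relation_name/relation_id and,
        # when present, the remote app and unit names.
        logger.info("Alert rules changed on relation id %s", event.relation_id)
```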
@@ -1517,7 +1521,7 @@ def loki_endpoints(self) -> List[dict]:
class LokiPushApiConsumer(ConsumerBase):
"""Loki Consumer class."""

on = LokiPushApiEvents()
on = LokiPushApiEvents() # pyright: ignore

def __init__(
self,
@@ -1760,7 +1764,7 @@ class LogProxyConsumer(ConsumerBase):
role.
"""

on = LogProxyEvents()
on = LogProxyEvents() # pyright: ignore

def __init__(
self,
@@ -1885,7 +1889,7 @@ def _on_relation_departed(self, _: RelationEvent) -> None:
self._container.stop(WORKLOAD_SERVICE_NAME)
self.on.log_proxy_endpoint_departed.emit()

def _get_container(self, container_name: str = "") -> Container:
def _get_container(self, container_name: str = "") -> Container: # pyright: ignore
"""Gets a single container by name or using the only container running in the Pod.

If there is more than one container in the Pod a `PromtailDigestError` is emitted.
@@ -1959,7 +1963,9 @@ def _add_pebble_layer(self, workload_binary_path: str) -> None:
}
},
}
self._container.add_layer(self._container_name, pebble_layer, combine=True)
self._container.add_layer(
self._container_name, pebble_layer, combine=True # pyright: ignore
)

def _create_directories(self) -> None:
"""Creates the directories for Promtail binary and config file."""
@@ -1996,7 +2002,11 @@ def _push_binary_to_workload(self, binary_path: str, workload_binary_path: str)
"""
with open(binary_path, "rb") as f:
self._container.push(
workload_binary_path, f, permissions=0o755, encoding=None, make_dirs=True
workload_binary_path,
f,
permissions=0o755,
encoding=None, # pyright: ignore
make_dirs=True,
)
logger.debug("The promtail binary file has been pushed to the workload container.")

@@ -2105,7 +2115,21 @@ def _download_and_push_promtail_to_workload(self, promtail_info: dict) -> None:
- "zipsha": sha256 sum of zip file of promtail binary
- "binsha": sha256 sum of unpacked promtail binary
"""
with request.urlopen(promtail_info["url"]) as r:
# Check for Juju proxy variables and fall back to standard ones if not set
proxies: Optional[Dict[str, str]] = {}
if proxies and os.environ.get("JUJU_CHARM_HTTP_PROXY"):
proxies.update({"http": os.environ["JUJU_CHARM_HTTP_PROXY"]})
if proxies and os.environ.get("JUJU_CHARM_HTTPS_PROXY"):
proxies.update({"https": os.environ["JUJU_CHARM_HTTPS_PROXY"]})
if proxies and os.environ.get("JUJU_CHARM_NO_PROXY"):
proxies.update({"no_proxy": os.environ["JUJU_CHARM_NO_PROXY"]})
else:
proxies = None

proxy_handler = request.ProxyHandler(proxies)
opener = request.build_opener(proxy_handler)

with opener.open(promtail_info["url"]) as r:
file_bytes = r.read()
file_path = os.path.join(BINARY_DIR, promtail_info["filename"] + ".gz")
with open(file_path, "wb") as f:
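The bulk of this hunk routes the promtail download through a `urllib.request.ProxyHandler` built from Juju's per-charm proxy variables. A simplified, standalone sketch of that intent (not the library's exact code): collect whichever `JUJU_CHARM_*` variables are set, and fall back to `None` so urllib's own environment-based proxy detection still applies when none are:

```python
import os
from typing import Dict
from urllib import request


def fetch_with_juju_proxies(url: str) -> bytes:
    """Download `url`, honouring Juju-model proxy settings when they are present."""
    proxies: Dict[str, str] = {}
    if os.environ.get("JUJU_CHARM_HTTP_PROXY"):
        proxies["http"] = os.environ["JUJU_CHARM_HTTP_PROXY"]
    if os.environ.get("JUJU_CHARM_HTTPS_PROXY"):
        proxies["https"] = os.environ["JUJU_CHARM_HTTPS_PROXY"]
    if os.environ.get("JUJU_CHARM_NO_PROXY"):
        proxies["no_proxy"] = os.environ["JUJU_CHARM_NO_PROXY"]

    # Passing an empty dict would disable proxying entirely; None lets urllib fall
    # back to the standard http_proxy/https_proxy environment variables.
    opener = request.build_opener(request.ProxyHandler(proxies or None))
    with opener.open(url) as resp:
        return resp.read()
```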
28 changes: 17 additions & 11 deletions lib/charms/prometheus_k8s/v0/prometheus_scrape.py
@@ -362,7 +362,7 @@ def _on_scrape_targets_changed(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 42
LIBPATCH = 44

PYDEPS = ["cosl"]

@@ -386,6 +386,7 @@ def _on_scrape_targets_changed(self, event):
"basic_auth",
"tls_config",
"authorization",
"params",
}
DEFAULT_JOB = {
"metrics_path": "/metrics",
@@ -764,7 +765,7 @@ def _validate_relation_by_interface_and_direction(
actual_relation_interface = relation.interface_name
if actual_relation_interface != expected_relation_interface:
raise RelationInterfaceMismatchError(
relation_name, expected_relation_interface, actual_relation_interface
relation_name, expected_relation_interface, actual_relation_interface or "None"
)

if expected_relation_role == RelationRole.provides:
@@ -857,7 +858,7 @@ class MonitoringEvents(ObjectEvents):
class MetricsEndpointConsumer(Object):
"""A Prometheus based Monitoring service."""

on = MonitoringEvents()
on = MonitoringEvents() # pyright: ignore

def __init__(self, charm: CharmBase, relation_name: str = DEFAULT_RELATION_NAME):
"""A Prometheus based Monitoring service.
@@ -1014,7 +1015,6 @@ def alerts(self) -> dict:
try:
scrape_metadata = json.loads(relation.data[relation.app]["scrape_metadata"])
identifier = JujuTopology.from_dict(scrape_metadata).identifier
alerts[identifier] = self._tool.apply_label_matchers(alert_rules) # type: ignore

except KeyError as e:
logger.debug(
@@ -1029,6 +1029,10 @@
)
continue

# We need to append the relation info to the identifier. This is to allow for cases where there are two
# relations which eventually scrape the same application. Issue #551.
identifier = f"{identifier}_{relation.name}_{relation.id}"

alerts[identifier] = alert_rules

_, errmsg = self._tool.validate_alert_rules(alert_rules)
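Suffixing the topology identifier with the relation name and id keeps alert groups from two relations that end up scraping the same application from overwriting each other (Issue #551). An illustration of the resulting keys, with made-up values:

```python
identifier = "my-model_1b2c3d4e_my-app"  # hypothetical JujuTopology identifier
alerts = {
    f"{identifier}_metrics-endpoint_7": {"groups": ["..."]},   # relation "metrics-endpoint", id 7
    f"{identifier}_metrics-endpoint_12": {"groups": ["..."]},  # a second relation, id 12
}
```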
@@ -1294,7 +1298,7 @@ def _resolve_dir_against_charm_path(charm: CharmBase, *path_elements: str) -> st
class MetricsEndpointProvider(Object):
"""A metrics endpoint for Prometheus."""

on = MetricsEndpointProviderEvents()
on = MetricsEndpointProviderEvents() # pyright: ignore

def __init__(
self,
Expand Down Expand Up @@ -1836,14 +1840,16 @@ def _set_prometheus_data(self, event):
return

jobs = [] + _type_convert_stored(
self._stored.jobs
self._stored.jobs # pyright: ignore
) # list of scrape jobs, one per relation
for relation in self.model.relations[self._target_relation]:
targets = self._get_targets(relation)
if targets and relation.app:
jobs.append(self._static_scrape_job(targets, relation.app.name))

groups = [] + _type_convert_stored(self._stored.alert_rules) # list of alert rule groups
groups = [] + _type_convert_stored(
self._stored.alert_rules # pyright: ignore
) # list of alert rule groups
for relation in self.model.relations[self._alert_rules_relation]:
unit_rules = self._get_alert_rules(relation)
if unit_rules and relation.app:
@@ -1895,7 +1901,7 @@ def set_target_job_data(self, targets: dict, app_name: str, **kwargs) -> None:
jobs.append(updated_job)
relation.data[self._charm.app]["scrape_jobs"] = json.dumps(jobs)

if not _type_convert_stored(self._stored.jobs) == jobs:
if not _type_convert_stored(self._stored.jobs) == jobs: # pyright: ignore
self._stored.jobs = jobs

def _on_prometheus_targets_departed(self, event):
Expand Down Expand Up @@ -1947,7 +1953,7 @@ def remove_prometheus_jobs(self, job_name: str, unit_name: Optional[str] = ""):

relation.data[self._charm.app]["scrape_jobs"] = json.dumps(jobs)

if not _type_convert_stored(self._stored.jobs) == jobs:
if not _type_convert_stored(self._stored.jobs) == jobs: # pyright: ignore
self._stored.jobs = jobs

def _job_name(self, appname) -> str:
Expand Down Expand Up @@ -2126,7 +2132,7 @@ def set_alert_rule_data(self, name: str, unit_rules: dict, label_rules: bool = T
groups.append(updated_group)
relation.data[self._charm.app]["alert_rules"] = json.dumps({"groups": groups})

if not _type_convert_stored(self._stored.alert_rules) == groups:
if not _type_convert_stored(self._stored.alert_rules) == groups: # pyright: ignore
self._stored.alert_rules = groups

def _on_alert_rules_departed(self, event):
Expand Down Expand Up @@ -2176,7 +2182,7 @@ def remove_alert_rules(self, group_name: str, unit_name: str) -> None:
json.dumps({"groups": groups}) if groups else "{}"
)

if not _type_convert_stored(self._stored.alert_rules) == groups:
if not _type_convert_stored(self._stored.alert_rules) == groups: # pyright: ignore
self._stored.alert_rules = groups

def _get_alert_rules(self, relation) -> dict:
11 changes: 6 additions & 5 deletions lib/charms/prometheus_k8s/v1/prometheus_remote_write.py
@@ -46,7 +46,7 @@

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 2
LIBPATCH = 4

PYDEPS = ["cosl"]

@@ -211,7 +211,7 @@ def _validate_relation_by_interface_and_direction(
actual_relation_interface = relation.interface_name
if actual_relation_interface != expected_relation_interface:
raise RelationInterfaceMismatchError(
relation_name, expected_relation_interface, actual_relation_interface
relation_name, expected_relation_interface, actual_relation_interface or "None"
)

if expected_relation_role == RelationRole.provides:
@@ -394,7 +394,7 @@ def __init__(self, *args):
```
"""

on = PrometheusRemoteWriteConsumerEvents()
on = PrometheusRemoteWriteConsumerEvents() # pyright: ignore

def __init__(
self,
@@ -458,7 +458,7 @@ def _on_relation_broken(self, event: RelationBrokenEvent) -> None:
self.on.endpoints_changed.emit(relation_id=event.relation.id)

def _handle_endpoints_changed(self, event: RelationEvent) -> None:
if self._charm.unit.is_leader():
if self._charm.unit.is_leader() and event.app is not None:
ev = json.loads(event.relation.data[event.app].get("event", "{}"))

if ev:
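The added `event.app is not None` check guards against indexing relation data with `None`, which `ops` types as possible during some teardown sequences. The same defensive pattern, reduced to a standalone sketch:

```python
import json


def remote_event_payload(event) -> dict:
    """Read the remote app's "event" blob only when the remote app is still known."""
    if event.app is None:
        return {}
    return json.loads(event.relation.data[event.app].get("event", "{}"))
```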
@@ -591,7 +591,7 @@ def __init__(self, *args):
name to differentiate between "incoming" and "outgoing" remote write interactions is necessary.
"""

on = PrometheusRemoteWriteProviderEvents()
on = PrometheusRemoteWriteProviderEvents() # pyright: ignore

def __init__(
self,
@@ -749,6 +749,7 @@ def alerts(self) -> dict:

_, errmsg = self._tool.validate_alert_rules(alert_rules)
if errmsg:
logger.error(f"Invalid alert rule file: {errmsg}")
if self._charm.unit.is_leader():
data = json.loads(relation.data[self._charm.app].get("event", "{}"))
data["errors"] = errmsg