Commit 6d9277b: static fix
PietroPasotti committed Jun 21, 2024 (parent: e5c5ef0)
Showing 7 changed files with 169 additions and 86 deletions.
1 change: 1 addition & 0 deletions requirements.txt
@@ -1,6 +1,7 @@
 # pin importlib-metadata version else charmcraft pack will fail to resolve the dependencies for the pydeps-installed otlp library
 importlib-metadata~=6.0.0
 ops
+crossplane
 jsonschema==4.17.0
 lightkube==0.11.0
 lightkube-models==1.24.1.4
16 changes: 9 additions & 7 deletions src/charm.py
@@ -133,12 +133,8 @@ def __init__(self, *args):
         # tracing
         self.framework.observe(self.tracing.on.request, self._on_tracing_request)
         self.framework.observe(self.tracing.on.broken, self._on_tracing_broken)
-        self.framework.observe(
-            self.on.peers_relation_created, self._on_peers_relation_created
-        )
-        self.framework.observe(
-            self.on.peers_relation_changed, self._on_peers_relation_changed
-        )
+        self.framework.observe(self.on.peers_relation_created, self._on_peers_relation_created)
+        self.framework.observe(self.on.peers_relation_changed, self._on_peers_relation_changed)
 
         # tls
         self.framework.observe(self.cert_handler.on.cert_changed, self._on_cert_handler_changed)
@@ -237,7 +233,13 @@ def peer_addresses(self) -> List[str]:
     @property
     def _local_ip(self) -> Optional[str]:
        try:
-            return str(self.model.get_binding("peers").network.bind_address)
+            binding = self.model.get_binding("peers")
+            if not binding:
+                logger.error("unable to get local IP at this time: "
+                             "peers binding not active yet. It could be that the charm "
+                             "is still being set up...")
+                return None
+            return str(binding.network.bind_address)
         except (ops.ModelError, KeyError) as e:
             logger.debug("failed to obtain local ip from peers binding", exc_info=True)
             logger.error(
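Note: ops' Model.get_binding() returns Optional[Binding], so the old one-liner could raise AttributeError (and fails static checks) whenever the peers binding is not yet available; the new code guards the None case first. A minimal sketch of the same pattern outside this charm (function name and logger are illustrative):

import logging
from typing import Optional

import ops

logger = logging.getLogger(__name__)


def local_bind_address(model: ops.Model, endpoint: str = "peers") -> Optional[str]:
    """Return this unit's bind address on `endpoint`, or None if it cannot be resolved yet."""
    try:
        binding = model.get_binding(endpoint)  # Optional[ops.Binding]
    except (ops.ModelError, KeyError):
        logger.debug("failed to obtain binding %r", endpoint, exc_info=True)
        return None
    if binding is None:
        # The binding may not exist yet while the charm is still being set up.
        return None
    return str(binding.network.bind_address)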
5 changes: 2 additions & 3 deletions src/coordinator.py
@@ -1,7 +1,6 @@
 import collections
 import logging
 from collections import Counter
-from typing import Dict, List, Set
+from typing import Dict, List, Set, Optional
 
 from tempo_cluster import TempoClusterProvider, TempoRole
@@ -72,7 +71,7 @@ def get_deployment_inconsistencies(self, has_s3: bool) -> List[str]:
     def _get_deployment_inconsistencies(
         has_s3: bool,
         coherent: bool,
-        missing_roles: Set[TempoRole] = None,
+        missing_roles: Optional[Set[TempoRole]] = None,
     ) -> List[str]:
         """Determine whether the deployment as a whole is consistent.
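The Optional annotation here (and on publish_data in src/tempo_cluster.py below) fixes an implicit-Optional default: under PEP 484, a bare None default does not make a parameter Optional, so strict type checkers reject the old signature. A tiny illustration with hypothetical functions:

from typing import Optional, Set


def bad(roles: Set[str] = None) -> None:  # rejected by strict checkers: None is not a Set[str]
    ...


def good(roles: Optional[Set[str]] = None) -> None:  # accepted
    ...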
2 changes: 1 addition & 1 deletion src/tempo.py
@@ -222,7 +222,7 @@ def generate_config(
 
         return config
 
-    def _build_storage_config(self, s3_config: Optional[dict] = None):
+    def _build_storage_config(self, s3_config: dict):
         storage_config = {
             "wal": {
                 # where to store the wal locally
190 changes: 131 additions & 59 deletions src/tempo_cluster.py
@@ -12,7 +12,7 @@
 import json
 import logging
 from enum import Enum, unique
-from typing import Any, Dict, List, MutableMapping, Optional, Set, cast
+from typing import Any, Dict, List, MutableMapping, Optional, Set
 
 import ops
 import pydantic
@@ -108,59 +108,133 @@ class JujuTopology(pydantic.BaseModel):
     # ...
 
 
-class DatabagModel(BaseModel):
-    """Base databag model."""
-
-    model_config = ConfigDict(
-        # Allow instantiating this class by field name (instead of forcing alias).
-        populate_by_name=True,
-        # Custom config key: whether to nest the whole datastructure (as json)
-        # under a field or spread it out at the toplevel.
-        _NEST_UNDER=None,  # noqa
-    )  # type: ignore
-    """Pydantic config."""
-
-    @classmethod
-    def load(cls, databag: MutableMapping[str, str]):
-        """Load this model from a Juju databag."""
-        nest_under = cls.model_config.get("_NEST_UNDER")  # noqa
-        if nest_under:
-            return cls.parse_obj(json.loads(databag[cast(str, nest_under)]))
-
-        try:
-            data = {k: json.loads(v) for k, v in databag.items() if k not in BUILTIN_JUJU_KEYS}
-        except json.JSONDecodeError as e:
-            msg = f"invalid databag contents: expecting json. {databag}"
-            log.info(msg)
-            raise DataValidationError(msg) from e
-
-        try:
-            return cls.parse_raw(json.dumps(data))  # type: ignore
-        except pydantic.ValidationError as e:
-            msg = f"failed to validate databag: {databag}"
-            log.info(msg, exc_info=True)
-            raise DataValidationError(msg) from e
-
-    def dump(self, databag: Optional[MutableMapping[str, str]] = None, clear: bool = True):
-        """Write the contents of this model to Juju databag.
-        :param databag: the databag to write the data to.
-        :param clear: ensure the databag is cleared before writing it.
-        """
-        if clear and databag:
-            databag.clear()
-
-        if databag is None:
-            databag = {}
-        nest_under = self.model_config.get("_NEST_UNDER")  # noqa
-        if nest_under:
-            databag[nest_under] = self.json()
-
-        dct = self.model_dump(by_alias=True)
-        for key, field in self.model_fields.items():  # type: ignore
-            value = dct[key]
-            databag[field.alias or key] = json.dumps(value)
-        return databag
+# DatabagModel implementation from traefik.v1.ingress charm lib.
+PYDANTIC_IS_V1 = int(pydantic.version.VERSION.split(".")[0]) < 2
+if PYDANTIC_IS_V1:
+
+    class DatabagModel(BaseModel):  # type: ignore
+        """Base databag model."""
+
+        class Config:
+            """Pydantic config."""
+
+            allow_population_by_field_name = True
+            """Allow instantiating this class by field name (instead of forcing alias)."""
+
+        _NEST_UNDER = None
+
+        @classmethod
+        def load(cls, databag: MutableMapping):
+            """Load this model from a Juju databag."""
+            if cls._NEST_UNDER:
+                return cls.parse_obj(json.loads(databag[cls._NEST_UNDER]))
+
+            try:
+                data = {
+                    k: json.loads(v)
+                    for k, v in databag.items()
+                    # Don't attempt to parse model-external values
+                    if k in {f.alias for f in cls.__fields__.values()}  # type: ignore
+                }
+            except json.JSONDecodeError as e:
+                msg = f"invalid databag contents: expecting json. {databag}"
+                log.error(msg)
+                raise DataValidationError(msg) from e
+
+            try:
+                return cls.parse_raw(json.dumps(data))  # type: ignore
+            except pydantic.ValidationError as e:
+                msg = f"failed to validate databag: {databag}"
+                log.debug(msg, exc_info=True)
+                raise DataValidationError(msg) from e
+
+        def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
+            """Write the contents of this model to Juju databag.
+            :param databag: the databag to write the data to.
+            :param clear: ensure the databag is cleared before writing it.
+            """
+            if clear and databag:
+                databag.clear()
+
+            if databag is None:
+                databag = {}
+
+            if self._NEST_UNDER:
+                databag[self._NEST_UNDER] = self.json(by_alias=True, exclude_defaults=True)
+                return databag
+
+            for key, value in self.dict(by_alias=True, exclude_defaults=True).items():  # type: ignore
+                databag[key] = json.dumps(value)
+
+            return databag
+
+else:
+    from pydantic import ConfigDict
+
+    class DatabagModel(BaseModel):
+        """Base databag model."""
+
+        model_config = ConfigDict(
+            # tolerate additional keys in databag
+            extra="ignore",
+            # Allow instantiating this class by field name (instead of forcing alias).
+            populate_by_name=True,
+            # Custom config key: whether to nest the whole datastructure (as json)
+            # under a field or spread it out at the toplevel.
+            _NEST_UNDER=None,
+        )  # type: ignore
+        """Pydantic config."""
+
+        @classmethod
+        def load(cls, databag: MutableMapping):
+            """Load this model from a Juju databag."""
+            nest_under = cls.model_config.get("_NEST_UNDER")
+            if nest_under:
+                return cls.model_validate(json.loads(databag[nest_under]))  # type: ignore
+
+            try:
+                data = {
+                    k: json.loads(v)
+                    for k, v in databag.items()
+                    # Don't attempt to parse model-external values
+                    if k in {(f.alias or n) for n, f in cls.__fields__.items()}  # type: ignore
+                }
+            except json.JSONDecodeError as e:
+                msg = f"invalid databag contents: expecting json. {databag}"
+                log.error(msg)
+                raise DataValidationError(msg) from e
+
+            try:
+                return cls.model_validate_json(json.dumps(data))  # type: ignore
+            except pydantic.ValidationError as e:
+                msg = f"failed to validate databag: {databag}"
+                log.debug(msg, exc_info=True)
+                raise DataValidationError(msg) from e
+
+        def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
+            """Write the contents of this model to Juju databag.
+            :param databag: the databag to write the data to.
+            :param clear: ensure the databag is cleared before writing it.
+            """
+            if clear and databag:
+                databag.clear()
+
+            if databag is None:
+                databag = {}
+            nest_under = self.model_config.get("_NEST_UNDER")
+            if nest_under:
+                databag[nest_under] = self.model_dump_json(  # type: ignore
+                    by_alias=True,
+                    # skip keys whose values are default
+                    exclude_defaults=True,
+                )
+                return databag
+
+            dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True)  # type: ignore
+            databag.update({k: json.dumps(v) for k, v in dct.items()})
+            return databag
 
 
 class TempoClusterRequirerAppData(DatabagModel):
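For orientation, subclasses of this DatabagModel (such as TempoClusterRequirerAppData above) are read from and written to relation databags via load/dump, which this commit makes work on both pydantic major versions. A minimal usage sketch, with a hypothetical WorkerData schema:

class WorkerData(DatabagModel):
    """Hypothetical example schema."""

    address: str


# dump() JSON-serializes each field into the databag under its alias (or name):
WorkerData(address="10.0.0.4:8080").dump(relation.data[unit])

# load() parses and validates, raising DataValidationError on bad contents:
worker = WorkerData.load(relation.data[unit])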
@@ -247,10 +321,10 @@ def publish_privkey(self, label: str) -> str:
     def publish_data(
         self,
         tempo_config: Dict[str, Any],
-        tempo_receiver: Dict[str, Any] = None,
-        ca_cert: str = None,
-        server_cert: str = None,
-        privkey_secret_id: str = None,
+        tempo_receiver: Optional[Dict[ReceiverProtocol, Any]] = None,
+        ca_cert: Optional[str] = None,
+        server_cert: Optional[str] = None,
+        privkey_secret_id: Optional[str] = None,
         loki_endpoints: Optional[Dict[str, str]] = None,
     ) -> None:
         """Publish the tempo config to all related tempo worker clusters."""
@@ -284,7 +358,6 @@ def gather_addresses_by_role(self) -> Dict[str, Set[str]]:
 
             try:
                 worker_app_data = TempoClusterRequirerAppData.load(relation.data[relation.app])
-                worker_roles = set(worker_app_data.roles)
             except DataValidationError as e:
                 log.info(f"invalid databag contents: {e}")
                 continue
@@ -293,8 +366,7 @@ def gather_addresses_by_role(self) -> Dict[str, Set[str]]:
                 try:
                     worker_data = TempoClusterRequirerUnitData.load(relation.data[worker_unit])
                     unit_address = worker_data.address
-                    for role in worker_roles:
-                        data[role].add(unit_address)
+                    data[worker_app_data.role].add(unit_address)
                 except DataValidationError as e:
                     log.info(f"invalid databag contents: {e}")
                     continue
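This follows the schema change from a roles list per worker application to a single role: every unit address of an app is now filed under that one role. A runnable sketch of the resulting shape (role names and addresses illustrative):

from collections import defaultdict
from typing import Dict, Set

addresses: Dict[str, Set[str]] = defaultdict(set)
# one role per worker app: all of its unit addresses land in the same bucket
for role, addr in [("querier", "10.0.0.4"), ("querier", "10.0.0.7"), ("ingester", "10.0.0.9")]:
    addresses[role].add(addr)
# -> {"querier": {"10.0.0.4", "10.0.0.7"}, "ingester": {"10.0.0.9"}}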
40 changes: 24 additions & 16 deletions tests/catan/test_clustering.py
@@ -8,7 +8,7 @@
 from catan import App, Catan
 from scenario import Container, State
 
-from tempo_cluster import TempoRole, TempoClusterRequirerAppData
+from tempo_cluster import TempoClusterRequirerAppData, TempoRole
 
 os.environ["CHARM_TRACING_ENABLED"] = "0"
@@ -18,9 +18,11 @@
 
 
 def get_facade(name="facade"):
-    meta = Path(FACADE_CHARM_ROOT) / 'charmcraft.yaml'
+    meta = Path(FACADE_CHARM_ROOT) / "charmcraft.yaml"
     if not meta.exists():
-        raise RuntimeError(f"{meta} not found: run facade_charm.update_endpoints before running this test")
+        raise RuntimeError(
+            f"{meta} not found: run facade_charm.update_endpoints before running this test"
+        )
 
     facade = App.from_path(
         FACADE_CHARM_ROOT,
@@ -145,9 +147,7 @@ def tempo_peers():
 
 @pytest.fixture
 def tempo_coordinator_state(tempo_peers):
-    return State(
-        relations=[tempo_peers]
-    )
+    return State(relations=[tempo_peers])
 
 
 @pytest.fixture
@@ -163,18 +163,26 @@ def update_s3_facade_action():
         "update",
         params={
             "endpoint": "provide-s3",
-            "app_data": json.dumps({
-                "access-key": "key",
-                "bucket": "tempo",
-                "endpoint": "http://1.2.3.4:9000",
-                "secret-key": "soverysecret",
-            })
-        }
+            "app_data": json.dumps(
+                {
+                    "access-key": "key",
+                    "bucket": "tempo",
+                    "endpoint": "http://1.2.3.4:9000",
+                    "secret-key": "soverysecret",
+                }
+            ),
+        },
     )
 
 
-def test_monolithic_deployment(tempo_coordinator, tempo_coordinator_state, tempo_worker,
-                               tempo_worker_state, s3_facade, update_s3_facade_action):
+def test_monolithic_deployment(
+    tempo_coordinator,
+    tempo_coordinator_state,
+    tempo_worker,
+    tempo_worker_state,
+    s3_facade,
+    update_s3_facade_action,
+):
     c = Catan()
 
     c.deploy(s3_facade)
@@ -187,4 +195,4 @@ def test_monolithic_deployment(
 
     c.settle()
 
-    c.check_status(tempo_coordinator, name='active')
+    c.check_status(tempo_coordinator, name="active")
1 change: 1 addition & 0 deletions tests/unit/test_coherence.py
@@ -1,6 +1,7 @@
 from unittest.mock import MagicMock
 
 import pytest as pytest
+
 from coordinator import (
     MINIMAL_DEPLOYMENT,
     RECOMMENDED_DEPLOYMENT,
