Skip to content

Commit

Permalink
logic refactor to address PR comments
Browse files Browse the repository at this point in the history
  • Loading branch information
IbraAoad committed Jan 3, 2024
1 parent 22ff036 commit e63e7ae
Show file tree
Hide file tree
Showing 5 changed files with 71 additions and 64 deletions.
4 changes: 1 addition & 3 deletions metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,7 @@ requires:
interface: s3
limit: 1
description: |
The coordinator obtains storage information on behalf of the workers and forwards all workers
the storage details over mimir-worker. Limited to 1 to ensure only one storage backend will
be connected to Mimir.
The coordinator obtains and shares storage details with workers, enabling Mimir's access to an S3 bucket for data storage.
send-remote-write:
interface: prometheus_remote_write
Expand Down
61 changes: 34 additions & 27 deletions src/charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from charms.prometheus_k8s.v0.prometheus_remote_write import (
PrometheusRemoteWriteConsumer,
)
from mimir_config import BUCKET_NAME, S3_RELATION_NAME, _S3StorageBackend
from mimir_config import BUCKET_NAME, S3_RELATION_NAME, _S3ConfigData
from mimir_coordinator import MimirCoordinator
from nginx import Nginx
from ops.charm import CharmBase, CollectStatusEvent
Expand Down Expand Up @@ -56,16 +56,17 @@ def __init__(self, *args):
self._on_nginx_pebble_ready,
)

self._s3_storage_data = _S3StorageBackend()
self.framework.observe(
self.on.mimir_cluster_relation_changed, # pyright: ignore
self._on_mimir_cluster_changed,
)

self.framework.observe(
self.s3_requirer.on.credentials_changed, self._on_mimir_cluster_changed
self.s3_requirer.on.credentials_changed, self._on_s3_requirer_credentials_changed
)
self.framework.observe(
self.s3_requirer.on.credentials_gone, self._on_s3_requirer_credentials_gone
)
self.framework.observe(self.s3_requirer.on.credentials_gone, self._on_s3_departed)

self.remote_write_consumer = PrometheusRemoteWriteConsumer(self)
self.framework.observe(
Expand All @@ -92,51 +93,57 @@ def mimir_worker_relations(self) -> List[Relation]:
"""Returns the list of worker relations."""
return self.model.relations.get("mimir_worker", [])

def _on_config_changed(self, event):
def _on_config_changed(self, _):
"""Handle changed configuration."""
self.publish_config()
s3_config_data = self._get_s3_storage_config()
self.publish_config(s3_config_data)

def publish_config(self):
"""Generate config file and publish to all workers."""
mimir_config = self.coordinator.build_config(self._s3_storage_data)
self.cluster_provider.publish_configs(mimir_config)
def _on_mimir_cluster_changed(self, _):
self._process_cluster_and_s3_credentials_changes()

def _s3_connection_info(self):
"""Parse a _S3StorageBackend object from relation data."""
if not self.s3_requirer.relations:
self._s3_storage_data = _S3StorageBackend()
return
raw = self.s3_requirer.get_s3_connection_info()
raw["access_key"], raw["secret_key"] = raw.pop("access-key", ""), raw.pop("secret-key", "")
self._s3_storage_data = _S3StorageBackend(**raw)
    def _on_s3_requirer_credentials_changed(self, _):
        """Handle updated s3 credentials: re-validate the cluster and republish config.

        Deliberately shares one handling path with mimir-cluster relation
        changes, since both events require rebuilding the worker config.
        """
        self._process_cluster_and_s3_credentials_changes()

def _on_mimir_cluster_changed(self, _):
def _process_cluster_and_s3_credentials_changes(self):
if not self.coordinator.is_coherent():
logger.warning("Incoherent deployment: Some required Mimir roles are missing.")
return
self._s3_connection_info()
if self._s3_storage_data == _S3StorageBackend() and self.coordinator.is_scaled():
s3_config_data = self._get_s3_storage_config()
if s3_config_data == _S3ConfigData() and self.coordinator.has_multiple_workers():
logger.warning("Filesystem storage cannot be used with replicated mimir workers")
return
self.publish_config()
self.publish_config(s3_config_data)

def _on_s3_departed(self, _):
self._s3_storage_data = _S3StorageBackend()
def _on_s3_requirer_credentials_gone(self, _):
if not self.coordinator.is_coherent():
logger.warning("Incoherent deployment: Some required Mimir roles are missing.")
return
if self.coordinator.is_scaled():
if self.coordinator.has_multiple_workers():
logger.warning("Filesystem storage cannot be used with replicated mimir workers")
return
self.publish_config()
self.publish_config(_S3ConfigData())

def publish_config(self, s3_config_data: _S3ConfigData):
"""Generate config file and publish to all workers."""
mimir_config = self.coordinator.build_config(s3_config_data)
self.cluster_provider.publish_configs(mimir_config)

def _get_s3_storage_config(self):
"""Retrieve S3 storage configuration."""
if not self.s3_requirer.relations:
return _S3ConfigData()
raw = self.s3_requirer.get_s3_connection_info()
raw["access_key"], raw["secret_key"] = raw.pop("access-key", ""), raw.pop("secret-key", "")
return _S3ConfigData(**raw)

def _on_collect_status(self, event: CollectStatusEvent):
"""Handle start event."""
if not self.coordinator.is_coherent():
event.add_status(
BlockedStatus("Incoherent deployment: you are lacking some required Mimir roles")
)
if self._s3_storage_data == _S3StorageBackend() and self.coordinator.is_scaled():
s3_config_data = self._get_s3_storage_config()
if s3_config_data == _S3ConfigData() and self.coordinator.has_multiple_workers():
event.add_status(
BlockedStatus("Missing s3 relation, replicated units must use S3 storage.")
)
Expand Down
4 changes: 2 additions & 2 deletions src/mimir_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ class Alertmanager(BaseModel):
external_url: Optional[str]


class _S3StorageBackend(BaseModel):
class _S3ConfigData(BaseModel):
access_key: str = ""
endpoint: str = ""
secret_key: str = ""
Expand All @@ -98,7 +98,7 @@ class _FilesystemStorageBackend(BaseModel):
dir: str


_StorageBackend = Union[_S3StorageBackend, _FilesystemStorageBackend]
_StorageBackend = Union[_S3ConfigData, _FilesystemStorageBackend]
_StorageKey = Union[Literal["filesystem"], Literal["s3"]]


Expand Down
45 changes: 23 additions & 22 deletions src/mimir_coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from typing import Any, Dict, Iterable

from charms.mimir_coordinator_k8s.v0.mimir_cluster import MimirClusterProvider, MimirRole
from mimir_config import _S3StorageBackend
from mimir_config import _S3ConfigData

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -70,8 +70,8 @@ def is_coherent(self) -> bool:
roles: Iterable[MimirRole] = self._cluster_provider.gather_roles().keys()
return set(roles).issuperset(MINIMAL_DEPLOYMENT)

def is_scaled(self) -> bool:
"""Return True if more than 1 worker are forming the mimir cluster."""
def has_multiple_workers(self) -> bool:
"""Return True if there are multiple workers forming the Mimir cluster."""
return len(list(self._cluster_provider.gather_addresses())) > 1

def is_recommended(self) -> bool:
Expand All @@ -86,7 +86,7 @@ def is_recommended(self) -> bool:
return False
return True

def build_config(self, s3_data: _S3StorageBackend) -> Dict[str, Any]:
def build_config(self, s3_config_data: _S3ConfigData) -> Dict[str, Any]:
"""Generate shared config file for mimir.
Reference: https://grafana.com/docs/mimir/latest/configure/
Expand All @@ -102,13 +102,11 @@ def build_config(self, s3_data: _S3StorageBackend) -> Dict[str, Any]:
"memberlist": self._build_memberlist_config(),
}

if s3_data != _S3StorageBackend():
mimir_config["common"]["storage"] = self._build_s3_storage_config(s3_data)
self._update_s3_storage_config(mimir_config["blocks_storage"], "filesystem", "blocks")
self._update_s3_storage_config(mimir_config["ruler_storage"], "filesystem", "rules")
self._update_s3_storage_config(
mimir_config["alertmanager_storage"], "filesystem", "alerts"
)
if s3_config_data != _S3ConfigData():
mimir_config["common"]["storage"] = self._build_s3_storage_config(s3_config_data)
self._update_s3_storage_config(mimir_config["blocks_storage"], "blocks")
self._update_s3_storage_config(mimir_config["ruler_storage"], "rules")
self._update_s3_storage_config(mimir_config["alertmanager_storage"], "alerts")

if self._tls_requirer:
mimir_config.update(self._build_tls_config())
Expand Down Expand Up @@ -184,23 +182,26 @@ def _build_blocks_storage_config(self) -> Dict[str, Any]:
},
}

def _build_s3_storage_config(self, s3_data: _S3StorageBackend) -> Dict[str, Any]:
def _build_s3_storage_config(self, s3_config_data: _S3ConfigData) -> Dict[str, Any]:
return {
"backend": "s3",
"s3": {
"endpoint": s3_data.endpoint,
"access_key_id": s3_data.access_key,
"secret_access_key": s3_data.secret_key,
"bucket_name": s3_data.bucket,
"region": s3_data.region,
"endpoint": s3_config_data.endpoint,
"access_key_id": s3_config_data.access_key,
"secret_access_key": s3_config_data.secret_key,
"bucket_name": s3_config_data.bucket,
"region": s3_config_data.region,
},
}

def _update_s3_storage_config(
self, storage_config: Dict[str, Any], old_key: str, prefix_name: str
) -> None:
if old_key in storage_config:
storage_config.pop(old_key)
def _update_s3_storage_config(self, storage_config: Dict[str, Any], prefix_name: str) -> None:
"""Update S3 storage configuration in `storage_config`.
If the key 'filesystem' is present in `storage_config`, remove it and add a new key
'storage_prefix' with the value of `prefix_name` for the S3 bucket.
"""
if "filesystem" in storage_config:
storage_config.pop("filesystem")
storage_config["storage_prefix"] = prefix_name

def _build_memberlist_config(self) -> Dict[str, Any]:
Expand Down
21 changes: 11 additions & 10 deletions tests/unit/test_config.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import unittest
from unittest.mock import MagicMock

from mimir_config import _S3StorageBackend
from mimir_config import _S3ConfigData
from mimir_coordinator import MimirCoordinator


Expand Down Expand Up @@ -49,33 +49,34 @@ def test_build_blocks_storage_config(self):
self.assertEqual(blocks_storage_config, expected_config)

def test_build_config_with_s3_data(self):
s3_data = _S3StorageBackend(
s3_config_data = _S3ConfigData(
endpoint="s3.com:port",
access_key="your_access_key",
secret_key="your_secret_key",
bucket="your_bucket",
region="your_region",
)
mimir_config = self.coordinator.build_config(s3_data)
mimir_config = self.coordinator.build_config(s3_config_data)
self.assertIn("storage", mimir_config["common"])
self.assertEqual(
mimir_config["common"]["storage"], self.coordinator._build_s3_storage_config(s3_data)
mimir_config["common"]["storage"],
self.coordinator._build_s3_storage_config(s3_config_data),
)

def test_build_config_without_s3_data(self):
s3_data = _S3StorageBackend()
mimir_config = self.coordinator.build_config(s3_data)
s3_config_data = _S3ConfigData()
mimir_config = self.coordinator.build_config(s3_config_data)
self.assertNotIn("storage", mimir_config["common"])

def test_build_s3_storage_config(self):
s3_data = _S3StorageBackend(
s3_config_data = _S3ConfigData(
endpoint="s3.com:port",
access_key="your_access_key",
secret_key="your_secret_key",
bucket="your_bucket",
region="your_region",
)
s3_storage_config = self.coordinator._build_s3_storage_config(s3_data)
s3_storage_config = self.coordinator._build_s3_storage_config(s3_config_data)
expected_config = {
"backend": "s3",
"s3": {
Expand All @@ -90,13 +91,13 @@ def test_build_s3_storage_config(self):

def test_update_s3_storage_config(self):
storage_config = {"filesystem": {"dir": "/etc/mimir/blocks"}}
self.coordinator._update_s3_storage_config(storage_config, "filesystem", "blocks")
self.coordinator._update_s3_storage_config(storage_config, "blocks")
expected_config = {"storage_prefix": "blocks"}
self.assertEqual(storage_config, expected_config)

def test_ne_update_s3_storage_config(self):
storage_config = {"storage_prefix": "blocks"}
self.coordinator._update_s3_storage_config(storage_config, "filesystem", "blocks")
self.coordinator._update_s3_storage_config(storage_config, "blocks")
expected_config = {"storage_prefix": "blocks"}
self.assertEqual(storage_config, expected_config)

Expand Down

0 comments on commit e63e7ae

Please sign in to comment.