[DPE-2671] integration test (#464)
* Rerender and reenable pgbackrest config and service

* Charm lib bump

* Delete pod test

* Add logs

* Don't log config file
dragomirp authored May 2, 2024
1 parent e2bae44 commit 15bf90b
Showing 6 changed files with 90 additions and 37 deletions.
25 changes: 15 additions & 10 deletions lib/charms/loki_k8s/v0/loki_push_api.py
@@ -480,7 +480,9 @@ def _alert_rules_error(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 26
LIBPATCH = 29

PYDEPS = ["cosl"]

logger = logging.getLogger(__name__)

@@ -2116,15 +2118,18 @@ def _download_and_push_promtail_to_workload(self, promtail_info: dict) -> None:
- "binsha": sha256 sum of unpacked promtail binary
"""
# Check for Juju proxy variables and fall back to standard ones if not set
proxies: Optional[Dict[str, str]] = {}
if proxies and os.environ.get("JUJU_CHARM_HTTP_PROXY"):
proxies.update({"http": os.environ["JUJU_CHARM_HTTP_PROXY"]})
if proxies and os.environ.get("JUJU_CHARM_HTTPS_PROXY"):
proxies.update({"https": os.environ["JUJU_CHARM_HTTPS_PROXY"]})
if proxies and os.environ.get("JUJU_CHARM_NO_PROXY"):
proxies.update({"no_proxy": os.environ["JUJU_CHARM_NO_PROXY"]})
else:
proxies = None
# If no Juju proxy variable was set, we set proxies to None to let the ProxyHandler get
# the proxy env variables from the environment
proxies = {
# The ProxyHandler uses only the protocol names as keys
# https://docs.python.org/3/library/urllib.request.html#urllib.request.ProxyHandler
"https": os.environ.get("JUJU_CHARM_HTTPS_PROXY", ""),
"http": os.environ.get("JUJU_CHARM_HTTP_PROXY", ""),
# The ProxyHandler uses `no` for the no_proxy key
# https://github.com/python/cpython/blob/3.12/Lib/urllib/request.py#L2553
"no": os.environ.get("JUJU_CHARM_NO_PROXY", ""),
}
proxies = {k: v for k, v in proxies.items() if v != ""} or None

proxy_handler = request.ProxyHandler(proxies)
opener = request.build_opener(proxy_handler)
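The promtail proxy hunk above leans on two `urllib.request.ProxyHandler` behaviours: proxies are keyed by protocol name (with `no` for the bypass list, per the CPython link in the comment), and passing `None` makes the handler read the standard proxy environment variables itself. A minimal sketch of the same fallback, with the environment values treated as hypothetical:

```python
import os
from urllib import request

# Mirror the charm's lookup of the Juju-provided proxy variables (values are hypothetical).
proxies = {
    "http": os.environ.get("JUJU_CHARM_HTTP_PROXY", ""),
    "https": os.environ.get("JUJU_CHARM_HTTPS_PROXY", ""),
    "no": os.environ.get("JUJU_CHARM_NO_PROXY", ""),  # bypass list; the key is "no", not "no_proxy"
}
# Drop empty entries; if nothing is set, pass None so ProxyHandler falls back to
# urllib.request.getproxies(), which reads http_proxy/https_proxy/no_proxy itself.
proxies = {k: v for k, v in proxies.items() if v} or None

opener = request.build_opener(request.ProxyHandler(proxies))
# opener.open("https://example.com/promtail.tar.gz")  # placeholder URL; proxied when configured
```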
17 changes: 8 additions & 9 deletions lib/charms/prometheus_k8s/v0/prometheus_scrape.py
@@ -362,7 +362,7 @@ def _on_scrape_targets_changed(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 44
LIBPATCH = 46

PYDEPS = ["cosl"]

@@ -521,8 +521,8 @@ def expand_wildcard_targets_into_individual_jobs(
# for such a target. Therefore labeling with Juju topology, excluding the
# unit name.
non_wildcard_static_config["labels"] = {
**non_wildcard_static_config.get("labels", {}),
**topology.label_matcher_dict,
**non_wildcard_static_config.get("labels", {}),
}

non_wildcard_static_configs.append(non_wildcard_static_config)
@@ -547,9 +547,9 @@ def expand_wildcard_targets_into_individual_jobs(
if topology:
# Add topology labels
modified_static_config["labels"] = {
**modified_static_config.get("labels", {}),
**topology.label_matcher_dict,
**{"juju_unit": unit_name},
**modified_static_config.get("labels", {}),
}

# Instance relabeling for topology should be last in order.
@@ -1537,12 +1537,11 @@ def set_scrape_job_spec(self, _=None):
relation.data[self._charm.app]["scrape_metadata"] = json.dumps(self._scrape_metadata)
relation.data[self._charm.app]["scrape_jobs"] = json.dumps(self._scrape_jobs)

if alert_rules_as_dict:
# Update relation data with the string representation of the rule file.
# Juju topology is already included in the "scrape_metadata" field above.
# The consumer side of the relation uses this information to name the rules file
# that is written to the filesystem.
relation.data[self._charm.app]["alert_rules"] = json.dumps(alert_rules_as_dict)
# Update relation data with the string representation of the rule file.
# Juju topology is already included in the "scrape_metadata" field above.
# The consumer side of the relation uses this information to name the rules file
# that is written to the filesystem.
relation.data[self._charm.app]["alert_rules"] = json.dumps(alert_rules_as_dict)

def _set_unit_ip(self, _=None):
"""Set unit host address.
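Both label hunks above only reorder the `**` expansions inside a dict literal. Because later keys win in a Python dict merge, the new order lets labels supplied in the scrape job's own static config override the generated Juju topology labels rather than the reverse. A short illustration with made-up label values:

```python
# Later keys win when merging dicts with **, so expansion order decides precedence.
topology_labels = {"juju_model": "cos", "juju_application": "postgresql-k8s"}  # generated
static_config_labels = {"juju_application": "custom-name", "env": "staging"}   # user-supplied

old_order = {**static_config_labels, **topology_labels}  # topology expanded last: it wins
new_order = {**topology_labels, **static_config_labels}  # static config expanded last: it wins

assert old_order["juju_application"] == "postgresql-k8s"
assert new_order["juju_application"] == "custom-name"
```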
17 changes: 13 additions & 4 deletions lib/charms/rolling_ops/v0/rollingops.py
@@ -49,7 +49,7 @@ def _restart(self, event):
To kick off the rolling restart, emit this library's AcquireLock event. The simplest way
to do so would be with an action, though it might make sense to acquire the lock in
response to another event.
response to another event.
```python
def _on_trigger_restart(self, event):
@@ -88,7 +88,7 @@ def _on_trigger_restart(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 5
LIBPATCH = 6


class LockNoRelationError(Exception):
@@ -182,6 +182,7 @@ def _state(self) -> LockState:
# Active acquire request.
return LockState.ACQUIRE

logger.debug("Lock state: %s %s", unit_state, app_state)
return app_state # Granted or unset/released

@_state.setter
@@ -202,21 +203,27 @@ def _state(self, state: LockState):
if state is LockState.IDLE:
self.relation.data[self.app].update({str(self.unit): state.value})

logger.debug("state: %s", state.value)

def acquire(self):
"""Request that a lock be acquired."""
self._state = LockState.ACQUIRE
logger.debug("Lock acquired.")

def release(self):
"""Request that a lock be released."""
self._state = LockState.RELEASE
logger.debug("Lock released.")

def clear(self):
"""Unset a lock."""
self._state = LockState.IDLE
logger.debug("Lock cleared.")

def grant(self):
"""Grant a lock to a unit."""
self._state = LockState.GRANTED
logger.debug("Lock granted.")

def is_held(self):
"""This unit holds the lock."""
@@ -266,9 +273,11 @@ def __init__(self, handle, callback_override: Optional[str] = None):
self.callback_override = callback_override or ""

def snapshot(self):
"""Snapshot of lock event."""
return {"callback_override": self.callback_override}

def restore(self, snapshot):
"""Restores lock event."""
self.callback_override = snapshot["callback_override"]


@@ -288,7 +297,7 @@ def __init__(self, charm: CharmBase, relation: AnyStr, callback: Callable):
charm: the charm we are attaching this to.
relation: an identifier, by convention based on the name of the relation in the
metadata.yaml, which identifies this instance of RollingOperatorsFactory,
distinct from other instances that may be hanlding other events.
distinct from other instances that may be handling other events.
callback: a closure to run when we have a lock. (It must take a CharmBase object and
EventBase object as args.)
"""
@@ -381,7 +390,7 @@ def _on_acquire_lock(self: CharmBase, event: ActionEvent):
"""Request a lock."""
try:
Lock(self).acquire() # Updates relation data
# emit relation changed event in the edge case where aquire does not
# emit relation changed event in the edge case where acquire does not
relation = self.model.get_relation(self.name)

# persist callback override for eventual run
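For context on the docstring and logging changes above, the library's documented flow is: instantiate `RollingOpsManager` with a peer relation name and a callback, then emit its `acquire_lock` event when a rolling operation is wanted; the callback runs only while the unit holds the lock. A minimal sketch of that pattern, assuming a `restart` peer relation and a `restart` action exist in the charm metadata; the event-emission line follows the library docstring and should be read as illustrative, not authoritative:

```python
from ops.charm import CharmBase
from ops.main import main

from charms.rolling_ops.v0.rollingops import RollingOpsManager


class SomeCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # "restart" must be declared as a peer relation in metadata.yaml.
        self.restart_manager = RollingOpsManager(
            charm=self, relation="restart", callback=self._restart
        )
        # Assumes a "restart" action is declared in actions.yaml.
        self.framework.observe(self.on.restart_action, self._on_restart_action)

    def _restart(self, event):
        # Runs only while this unit holds the lock; the leader grants it one unit at a time.
        ...

    def _on_restart_action(self, event):
        # Request the lock by emitting the manager's AcquireLock event.
        self.on[self.restart_manager.name].acquire_lock.emit()


if __name__ == "__main__":
    main(SomeCharm)
```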
30 changes: 16 additions & 14 deletions lib/charms/tls_certificates_interface/v2/tls_certificates.py
@@ -277,7 +277,7 @@ def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEven
import logging
import uuid
from contextlib import suppress
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from ipaddress import IPv4Address
from typing import Any, Dict, List, Literal, Optional, Union

@@ -286,7 +286,7 @@ def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEven
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import pkcs12
from jsonschema import exceptions, validate # type: ignore[import-untyped]
from jsonschema import exceptions, validate
from ops.charm import (
CharmBase,
CharmEvents,
@@ -307,7 +307,7 @@ def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEven

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 27
LIBPATCH = 28

PYDEPS = ["cryptography", "jsonschema"]

@@ -635,7 +635,9 @@ def _get_closest_future_time(
datetime: expiry_notification_time if not in the past, expiry_time otherwise
"""
return (
expiry_notification_time if datetime.utcnow() < expiry_notification_time else expiry_time
expiry_notification_time
if datetime.now(timezone.utc) < expiry_notification_time
else expiry_time
)


@@ -650,7 +652,7 @@ def _get_certificate_expiry_time(certificate: str) -> Optional[datetime]:
"""
try:
certificate_object = x509.load_pem_x509_certificate(data=certificate.encode())
return certificate_object.not_valid_after
return certificate_object.not_valid_after_utc
except ValueError:
logger.warning("Could not load certificate.")
return None
@@ -705,8 +707,8 @@ def generate_ca(
.issuer_name(subject_name)
.public_key(private_key_object.public_key()) # type: ignore[arg-type]
.serial_number(x509.random_serial_number())
.not_valid_before(datetime.utcnow())
.not_valid_after(datetime.utcnow() + timedelta(days=validity))
.not_valid_before(datetime.now(timezone.utc))
.not_valid_after(datetime.now(timezone.utc) + timedelta(days=validity))
.add_extension(x509.SubjectKeyIdentifier(digest=subject_identifier), critical=False)
.add_extension(
x509.AuthorityKeyIdentifier(
@@ -860,8 +862,8 @@ def generate_certificate(
.issuer_name(issuer)
.public_key(csr_object.public_key())
.serial_number(x509.random_serial_number())
.not_valid_before(datetime.utcnow())
.not_valid_after(datetime.utcnow() + timedelta(days=validity))
.not_valid_before(datetime.now(timezone.utc))
.not_valid_after(datetime.now(timezone.utc) + timedelta(days=validity))
)
extensions = get_certificate_extensions(
authority_key_identifier=ca_pem.extensions.get_extension_for_class(
@@ -1070,7 +1072,7 @@ class CertificatesRequirerCharmEvents(CharmEvents):
class TLSCertificatesProvidesV2(Object):
"""TLS certificates provider class to be instantiated by TLS certificates providers."""

on = CertificatesProviderCharmEvents()
on = CertificatesProviderCharmEvents() # type: ignore[reportAssignmentType]

def __init__(self, charm: CharmBase, relationship_name: str):
super().__init__(charm, relationship_name)
@@ -1481,7 +1483,7 @@ def certificate_issued_for_csr(
class TLSCertificatesRequiresV2(Object):
"""TLS certificates requirer class to be instantiated by TLS certificates requirers."""

on = CertificatesRequirerCharmEvents()
on = CertificatesRequirerCharmEvents() # type: ignore[reportAssignmentType]

def __init__(
self,
@@ -1708,7 +1710,7 @@ def get_expiring_certificates(self) -> List[Dict[str, str]]:
expiry_notification_time = expiry_time - timedelta(
hours=self.expiry_notification_time
)
if datetime.utcnow() > expiry_notification_time:
if datetime.now(timezone.utc) > expiry_notification_time:
final_list.append(cert)
return final_list

@@ -1891,7 +1893,7 @@ def _on_secret_expired(self, event: SecretExpiredEvent) -> None:
event.secret.remove_all_revisions()
return

if datetime.utcnow() < expiry_time:
if datetime.now(timezone.utc) < expiry_time:
logger.warning("Certificate almost expired")
self.on.certificate_expiring.emit(
certificate=certificate_dict["certificate"],
@@ -1937,7 +1939,7 @@ def _on_update_status(self, event: UpdateStatusEvent) -> None:
expiry_time = _get_certificate_expiry_time(certificate_dict["certificate"])
if not expiry_time:
continue
time_difference = expiry_time - datetime.utcnow()
time_difference = expiry_time - datetime.now(timezone.utc)
if time_difference.total_seconds() < 0:
logger.warning("Certificate is expired")
self.on.certificate_invalidated.emit(
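The recurring change in this file is the switch from naive `datetime.utcnow()` to timezone-aware `datetime.now(timezone.utc)`, paired with the certificate's `not_valid_after_utc` property (the timezone-aware accessor offered by newer `cryptography` releases). The two styles cannot be mixed: comparing a naive and an aware datetime raises at runtime. A short illustration:

```python
from datetime import datetime, timedelta, timezone

naive = datetime.utcnow()           # tzinfo is None; deprecated since Python 3.12
aware = datetime.now(timezone.utc)  # tzinfo is timezone.utc

try:
    naive < aware
except TypeError as err:
    print(err)  # can't compare offset-naive and offset-aware datetimes

# With everything aware, expiry arithmetic and comparisons stay unambiguous.
expiry = aware + timedelta(days=365)
assert datetime.now(timezone.utc) < expiry
```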
12 changes: 12 additions & 0 deletions tests/integration/helpers.py
@@ -4,6 +4,7 @@
import asyncio
import itertools
from datetime import datetime
from multiprocessing import ProcessError
from pathlib import Path
from typing import List, Optional

@@ -762,3 +763,14 @@ def wait_for_relation_removed_between(
break
except RetryError:
assert False, "Relation failed to exit after 3 minutes."


async def cat_file_from_unit(ops_test: OpsTest, filepath: str, unit_name: str) -> str:
"""Gets a file from the postgresql container of an application unit."""
cat_cmd = f"ssh --container postgresql {unit_name} cat {filepath}"
return_code, output, _ = await ops_test.juju(*cat_cmd.split(" "))
if return_code != 0:
raise ProcessError(
"Expected cat command %s to succeed instead it failed: %s", cat_cmd, return_code
)
return output
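
The new helper shells into the unit's `postgresql` container via `ops_test.juju` and returns the file contents, raising if the `cat` fails. A hypothetical test using it (the test name and unit are illustrative; the real call sites are in `test_backups.py` below):

```python
import pytest
from pytest_operator.plugin import OpsTest

from .helpers import cat_file_from_unit


@pytest.mark.group(1)
async def test_pgbackrest_config_is_rendered(ops_test: OpsTest):
    # Hypothetical unit name; any deployed postgresql-k8s unit would do.
    config = await cat_file_from_unit(ops_test, "/etc/pgbackrest.conf", "postgresql-k8s/0")
    assert config.strip(), "expected a non-empty pgbackrest config"
```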
26 changes: 26 additions & 0 deletions tests/integration/test_backups.py
@@ -7,12 +7,15 @@

import boto3
import pytest as pytest
from lightkube.core.client import Client
from lightkube.resources.core_v1 import Pod
from pytest_operator.plugin import OpsTest
from tenacity import Retrying, stop_after_attempt, wait_exponential

from .helpers import (
DATABASE_APP_NAME,
build_and_deploy,
cat_file_from_unit,
construct_endpoint,
db_connect,
get_password,
@@ -424,3 +427,26 @@ async def test_invalid_config_and_recovery_after_fixing_it(
await ops_test.model.wait_for_idle(
apps=[database_app_name, S3_INTEGRATOR_APP_NAME], status="active"
)


@pytest.mark.group(1)
async def test_delete_pod(ops_test: OpsTest):
logger.info("Getting original backup config")
database_app_name = f"new-{DATABASE_APP_NAME}"
original_pgbackrest_config = await cat_file_from_unit(
ops_test, "/etc/pgbackrest.conf", f"{database_app_name}/0"
)

# delete the pod
logger.info("Deleting the pod")
client = Client(namespace=ops_test.model.info.name)
client.delete(Pod, name=f"{database_app_name}-0")

# Wait and get the primary again (which can be any unit, including the previous primary).
async with ops_test.fast_forward():
await ops_test.model.wait_for_idle(apps=[database_app_name], status="active")

new_pgbackrest_config = await cat_file_from_unit(
ops_test, "/etc/pgbackrest.conf", f"{database_app_name}/0"
)
assert original_pgbackrest_config == new_pgbackrest_config, "Pgbackrest config not rerendered"
