From 9b9c0a07d0fd32ce078d340b38c524d8a138fc55 Mon Sep 17 00:00:00 2001
From: Adam Dyess
Date: Thu, 19 Dec 2024 14:37:02 -0600
Subject: [PATCH] Pick up changes applied to 1.32-release branch (#225)

* Pin snap_installations to 1.32-classic stables (#223)

* Revert some back to main branch standard
---
 .github/workflows/update-snap-revision.py    |  2 +-
 charms/worker/k8s/src/literals.py            |  4 ++
 charms/worker/k8s/src/snap.py                | 33 +++++++++---
 charms/worker/k8s/src/upgrade.py             |  4 +-
 .../k8s/templates/snap_installation.yaml     |  4 +-
 charms/worker/k8s/tests/unit/test_snap.py    | 10 ++--
 charms/worker/k8s/tests/unit/test_upgrade.py |  6 +--
 tests/integration/test_k8s.py                |  4 +-
 tests/integration/test_upgrade.py            | 52 +++++++++++++++----
 9 files changed, 86 insertions(+), 33 deletions(-)

diff --git a/.github/workflows/update-snap-revision.py b/.github/workflows/update-snap-revision.py
index 1e241f8c..329bddde 100644
--- a/.github/workflows/update-snap-revision.py
+++ b/.github/workflows/update-snap-revision.py
@@ -13,7 +13,7 @@
 logging.basicConfig(format="%(levelname)-8s: %(message)s", level=logging.INFO)
 log = logging.getLogger("update-snap-revision")
 
-TRACK = "1.31-classic"
+TRACK = "1.32-classic"
 RISK = "stable"
 ROOT = Path(__file__).parent / ".." / ".."
 INSTALLATION = ROOT / "charms/worker/k8s/templates/snap_installation.yaml"
diff --git a/charms/worker/k8s/src/literals.py b/charms/worker/k8s/src/literals.py
index df1a1ef7..03ad8ce2 100644
--- a/charms/worker/k8s/src/literals.py
+++ b/charms/worker/k8s/src/literals.py
@@ -19,6 +19,9 @@
 K8SD_PORT = 6400
 SUPPORTED_DATASTORES = ["dqlite", "etcd"]
 
+# Features
+SUPPORT_SNAP_INSTALLATION_OVERRIDE = True
+
 # Relations
 CLUSTER_RELATION = "cluster"
 CLUSTER_WORKER_RELATION = "k8s-cluster"
@@ -27,6 +30,7 @@
 COS_TOKENS_WORKER_RELATION = "cos-worker-tokens"
 COS_RELATION = "cos-agent"
 ETCD_RELATION = "etcd"
+UPGRADE_RELATION = "upgrade"
 
 # Kubernetes services
 K8S_COMMON_SERVICES = [
diff --git a/charms/worker/k8s/src/snap.py b/charms/worker/k8s/src/snap.py
index 0f19a7c5..c1cbbe74 100644
--- a/charms/worker/k8s/src/snap.py
+++ b/charms/worker/k8s/src/snap.py
@@ -19,6 +19,7 @@
 import charms.operator_libs_linux.v2.snap as snap_lib
 import ops
 import yaml
+from literals import SUPPORT_SNAP_INSTALLATION_OVERRIDE
 from protocols import K8sCharmProtocol
 from pydantic import BaseModel, Field, ValidationError, parse_obj_as, validator
 from typing_extensions import Annotated
@@ -110,6 +111,15 @@ def _validate_revision(cls, value: Union[str, int, None]) -> Optional[str]:
             raise ValueError(f"Revision is not an integer: {value}")
         return value
 
+    def __str__(self):
+        """Represent the snap store argument as a string.
+
+        Returns:
+            str: The string representation
+        """
+        _type = type(self).__name__
+        return f"<{_type}: {self.name}-{self.revision}.{self.channel} -- {self.state}>"
+
 
 SnapArgument = Annotated[
     Union[SnapFileArgument, SnapStoreArgument], Field(discriminator="install_type")
@@ -175,6 +185,10 @@ def _select_snap_installation(charm: ops.CharmBase) -> Path:
     Raises:
         SnapError: when the management issue cannot be resolved
     """
+    if not SUPPORT_SNAP_INSTALLATION_OVERRIDE:
+        log.error("Unavailable feature: overriding 'snap-installation' resource.")
+        return _default_snap_installation()
+
     try:
         resource_path = charm.model.resources.fetch("snap-installation")
     except (ops.ModelError, NameError):
@@ -269,23 +283,28 @@ def management(charm: K8sCharmProtocol) -> None:
 
     Arguments:
         charm: The charm instance
+
+    Raises:
+        SnapError: when the management issue cannot be resolved
     """
     cache = snap_lib.SnapCache()
     for args in _parse_management_arguments(charm):
-        which = cache[args.name]
+        which: snap_lib.Snap = cache[args.name]
         if block_refresh(which, args, charm.is_upgrade_granted):
             continue
         install_args = args.dict(exclude_none=True)
         if isinstance(args, SnapFileArgument) and which.revision != "x1":
             snap_lib.install_local(**install_args)
-        elif isinstance(args, SnapStoreArgument) and args.revision:
-            if which.revision != args.revision:
-                log.info("Ensuring %s snap revision=%s", args.name, args.revision)
+        elif isinstance(args, SnapStoreArgument):
+            log.info("Ensuring args=%s current=%s", str(args), str(which))
+            new_rev = bool(args.revision) and which.revision != args.revision
+            new_channel = bool(args.channel) and which.channel != args.channel
+            new_state = which.state not in (snap_lib.SnapState.Present, snap_lib.SnapState.Latest)
+            if any([new_rev, new_channel, new_state]):
                 which.ensure(**install_args)
                 which.hold()
-        elif isinstance(args, SnapStoreArgument):
-            log.info("Ensuring %s snap channel=%s", args.name, args.channel)
-            which.ensure(**install_args)
+        else:
+            raise snap_lib.SnapError(f"Unsupported snap argument: {args}")
 
 
 def block_refresh(which: snap_lib.Snap, args: SnapArgument, upgrade_granted: bool = False) -> bool:
diff --git a/charms/worker/k8s/src/upgrade.py b/charms/worker/k8s/src/upgrade.py
index 520a1f7a..f45ccb5b 100644
--- a/charms/worker/k8s/src/upgrade.py
+++ b/charms/worker/k8s/src/upgrade.py
@@ -19,11 +19,11 @@
 from charms.operator_libs_linux.v2.snap import SnapError
 from inspector import ClusterInspector
 from literals import (
-    CLUSTER_RELATION,
     K8S_CONTROL_PLANE_SERVICES,
     K8S_DQLITE_SERVICE,
     K8S_WORKER_SERVICES,
     SNAP_NAME,
+    UPGRADE_RELATION,
 )
 from protocols import K8sCharmProtocol
 from pydantic import BaseModel
@@ -226,7 +226,7 @@ def build_upgrade_stack(self) -> List[int]:
         Returns:
             A list of unit numbers to upgrade in order.
         """
-        relation = self.charm.model.get_relation(CLUSTER_RELATION)
+        relation = self.charm.model.get_relation(UPGRADE_RELATION)
         if not relation:
             return [int(self.charm.unit.name.split("/")[-1])]
 
diff --git a/charms/worker/k8s/templates/snap_installation.yaml b/charms/worker/k8s/templates/snap_installation.yaml
index 828d2da3..7a689219 100644
--- a/charms/worker/k8s/templates/snap_installation.yaml
+++ b/charms/worker/k8s/templates/snap_installation.yaml
@@ -4,10 +4,10 @@
 amd64:
   - name: k8s
     install-type: store
-    channel: edge
     classic: true
+    channel: latest/edge
 arm64:
   - name: k8s
     install-type: store
-    channel: edge
     classic: true
+    channel: latest/edge
diff --git a/charms/worker/k8s/tests/unit/test_snap.py b/charms/worker/k8s/tests/unit/test_snap.py
index 65057caf..9d0bb8a5 100644
--- a/charms/worker/k8s/tests/unit/test_snap.py
+++ b/charms/worker/k8s/tests/unit/test_snap.py
@@ -212,6 +212,7 @@ def _create_gzip_tar_string(file_data_dict):
     return gzip_buffer.getvalue()
 
 
+@mock.patch("snap.SUPPORT_SNAP_INSTALLATION_OVERRIDE", True)
 def test_resource_supplied_installation_by_channel(harness):
     """Test resource installs by store channel."""
     arch = snap._local_arch()
@@ -226,6 +227,7 @@ def test_resource_supplied_installation_by_channel(harness):
     assert args[0].install_type == "store"
 
 
+@mock.patch("snap.SUPPORT_SNAP_INSTALLATION_OVERRIDE", True)
 def test_resource_supplied_installation_by_filename(harness, resource_snap_installation):
     """Test resource installs by included filename."""
     arch = snap._local_arch()
@@ -251,6 +253,7 @@ def test_resource_supplied_installation_by_filename(harness, resource_snap_insta
     assert args[0].classic
 
 
+@mock.patch("snap.SUPPORT_SNAP_INSTALLATION_OVERRIDE", True)
 def test_resource_supplied_snap(harness, resource_snap_installation):
     """Test resource installs by snap only."""
     file_data = {"./k8s_xxxx.snap": ""}
@@ -339,12 +342,7 @@ def test_management_installs_store_from_revision(cache, install_local, args, rev
     args.return_value = [snap.SnapStoreArgument(name="k8s", revision=123)]
     snap.management(harness.charm)
     install_local.assert_not_called()
-    if revision == "123":
-        k8s_snap.ensure.assert_not_called()
-    else:
-        k8s_snap.ensure.assert_called_once_with(
-            state=snap.snap_lib.SnapState.Present, revision="123"
-        )
+    k8s_snap.ensure.assert_called_once_with(state=snap.snap_lib.SnapState.Present, revision="123")
 
 
 @mock.patch("subprocess.check_output")
diff --git a/charms/worker/k8s/tests/unit/test_upgrade.py b/charms/worker/k8s/tests/unit/test_upgrade.py
index 3c5ddaac..ddb1631c 100644
--- a/charms/worker/k8s/tests/unit/test_upgrade.py
+++ b/charms/worker/k8s/tests/unit/test_upgrade.py
@@ -11,7 +11,7 @@
 from inspector import ClusterInspector
 from lightkube.models.core_v1 import Node
 from lightkube.models.meta_v1 import ObjectMeta
-from literals import CLUSTER_RELATION
+from literals import UPGRADE_RELATION
 from upgrade import K8sDependenciesModel, K8sUpgrade
 
 
@@ -103,7 +103,7 @@ def test_build_upgrade_stack_no_relation(self):
         result = self.upgrade.build_upgrade_stack()
 
         self.assertEqual(result, [0])
-        self.charm.model.get_relation.assert_called_once_with(CLUSTER_RELATION)
+        self.charm.model.get_relation.assert_called_once_with(UPGRADE_RELATION)
 
     def test_build_upgrade_stack_with_relation(self):
         """Test build_upgrade_stack with cluster relation."""
@@ -119,7 +119,7 @@ def test_build_upgrade_stack_with_relation(self):
         result = self.upgrade.build_upgrade_stack()
 
         self.assertEqual(sorted(result), [0, 1, 2])
-        self.charm.model.get_relation.assert_called_once_with(CLUSTER_RELATION)
+        self.charm.model.get_relation.assert_called_once_with(UPGRADE_RELATION)
 
     def test_verify_worker_versions_compatible(self):
         """Test _verify_worker_versions returns True when worker versions is compatible."""
diff --git a/tests/integration/test_k8s.py b/tests/integration/test_k8s.py
index 46b256c7..f1b44880 100644
--- a/tests/integration/test_k8s.py
+++ b/tests/integration/test_k8s.py
@@ -153,13 +153,13 @@ async def override_snap_on_k8s(kubernetes_cluster: model.Model, request):
 
     with override.open("rb") as obj:
         k8s.attach_resource("snap-installation", override, obj)
-    await kubernetes_cluster.wait_for_idle(status="active", timeout=1 * 60)
+    await kubernetes_cluster.wait_for_idle(status="active", timeout=5 * 60)
 
     yield k8s
 
     with revert.open("rb") as obj:
         k8s.attach_resource("snap-installation", revert, obj)
-    await kubernetes_cluster.wait_for_idle(status="active", timeout=1 * 60)
+    await kubernetes_cluster.wait_for_idle(status="active", timeout=5 * 60)
 
 
 async def test_verbose_config(kubernetes_cluster: model.Model):
diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py
index 883243fb..1850262f 100644
--- a/tests/integration/test_upgrade.py
+++ b/tests/integration/test_upgrade.py
@@ -6,7 +6,8 @@
 """Upgrade Integration tests."""
 
 import logging
-from typing import Optional
+import subprocess
+from typing import Iterable, Optional, Tuple
 
 import juju.application
 import juju.model
@@ -16,20 +17,53 @@
 from pytest_operator.plugin import OpsTest
 from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
 
-from .helpers import Bundle, get_leader, get_rsc
+from .helpers import CHARMCRAFT_DIRS, Bundle, get_leader, get_rsc
+
+CHARM_UPGRADE_FROM = "1.32/beta"
+log = logging.getLogger(__name__)
+
+
+def charm_channel_missing(charms: Iterable[str], channel: str) -> Tuple[bool, str]:
+    """Run to test if a given channel has charms for deployment
+
+    Args:
+        charms: The list of charms to check
+        channel: The charm channel to check
+
+    Returns:
+        True if the charm channel or any lower risk exists, False otherwise
+        Returns a string with the reason if True
+    """
+    risk_levels = ["edge", "beta", "candidate", "stable"]
+    track, riskiest = channel.split("/")
+    riskiest_level = risk_levels.index(riskiest)
+    for app in charms:
+        for lookup in risk_levels[riskiest_level:]:
+            out = subprocess.check_output(
+                ["juju", "info", app, "--channel", f"{track}/{lookup}", "--format", "yaml"]
+            )
+            track_map = yaml.safe_load(out).get("channels", {}).get(track, {})
+            if lookup in track_map:
+                log.info("Found %s in %s", app, f"{track}/{lookup}")
+                break
+        else:
+            return True, f"No suitable channel found for {app} in {channel} to upgrade from"
+    return False, ""
+
+
+not_found, not_found_reason = charm_channel_missing(CHARMCRAFT_DIRS, CHARM_UPGRADE_FROM)
 
 
 # This pytest mark configures the test environment to use the Canonical Kubernetes
 # deploying charms from the edge channels, then upgrading them to the built charm.
 pytestmark = [
+    pytest.mark.skipif(not_found, reason=not_found_reason),
     pytest.mark.bundle(
-        file="test-bundle.yaml", apps_channel={"k8s": "edge", "k8s-worker": "edge"}
+        file="test-bundle.yaml",
+        apps_channel={"k8s": CHARM_UPGRADE_FROM, "k8s-worker": CHARM_UPGRADE_FROM},
     ),
 ]
 
-log = logging.getLogger(__name__)
-
-
 @pytest.mark.abort_on_fail
 async def test_upgrade(kubernetes_cluster: juju.model.Model, ops_test: OpsTest):
     """Upgrade the model with the provided charms.
@@ -73,10 +107,8 @@ async def _refresh(app_name: str):
         action = await leader.run_action("pre-upgrade-check")
         await action.wait()
         with_fault = f"Pre-upgrade of '{app_name}' failed with {yaml.safe_dump(action.results)}"
-        if app_name == "k8s":
-            # The k8s charm has a pre-upgrade-check action that works, k8s-worker does not.
-            assert action.status == "completed", with_fault
-            assert action.results["return-code"] == 0, with_fault
+        assert action.status == "completed", with_fault
+        assert action.results["return-code"] == 0, with_fault
         await app.refresh(path=charms[app_name].local_path, resources=local_resources)
         await kubernetes_cluster.wait_for_idle(
             apps=list(charms.keys()),
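
Note on the refresh logic in charms/worker/k8s/src/snap.py above: management() now refreshes a store snap only when something actually differs from what is installed -- a requested revision that does not match, a requested channel that does not match, or a snap whose state is not already Present/Latest. The standalone sketch below mirrors that decision for illustration only; the InstalledSnap stand-in and the plain "present"/"latest" strings (in place of snap_lib.SnapState values) and sample channel/revision values are assumptions, not part of the patch.

# Illustrative sketch of the refresh decision in management(); the
# InstalledSnap stand-in and the sample values are hypothetical.
from dataclasses import dataclass
from typing import Optional


@dataclass
class InstalledSnap:
    revision: str
    channel: str
    state: str  # stand-in for snap_lib.SnapState, e.g. "present" or "available"


def needs_refresh(installed: InstalledSnap, revision: Optional[str], channel: Optional[str]) -> bool:
    """Mirror the new_rev / new_channel / new_state checks in snap.management()."""
    new_rev = bool(revision) and installed.revision != revision
    new_channel = bool(channel) and installed.channel != channel
    new_state = installed.state not in ("present", "latest")
    return any([new_rev, new_channel, new_state])


# Already at the requested revision and installed: left alone.
assert not needs_refresh(InstalledSnap("123", "1.32-classic/stable", "present"), "123", None)
# Store arguments ask for a different revision: which.ensure(**install_args) runs, then which.hold().
assert needs_refresh(InstalledSnap("122", "1.32-classic/stable", "present"), "123", None)

The updated unit test test_management_installs_store_from_revision reflects this change by expecting ensure() to be called once with the requested revision, rather than branching on whether the revision already matched.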