From f8fb5a0d1973f7e15b16ec4db5f6e0017d4d54ba Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Dec 2024 02:00:03 +0000 Subject: [PATCH 1/4] Update data-platform-workflows to v24 --- .github/workflows/ci.yaml | 6 +++--- .github/workflows/release.yaml | 4 ++-- .github/workflows/sync_docs.yaml | 2 +- poetry.lock | 20 +++++++++----------- pyproject.toml | 8 ++++---- 5 files changed, 19 insertions(+), 21 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2bd0ac5bdf..0679e77135 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -23,7 +23,7 @@ on: jobs: lint: name: Lint - uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v23.1.1 + uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v24.0.2 unit-test: name: Unit test charm @@ -45,7 +45,7 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v23.1.1 + uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v24.0.2 with: cache: true @@ -77,7 +77,7 @@ jobs: - lint - unit-test - build - uses: canonical/data-platform-workflows/.github/workflows/integration_test_charm.yaml@v23.1.1 + uses: canonical/data-platform-workflows/.github/workflows/integration_test_charm.yaml@v24.0.2 with: artifact-prefix: ${{ needs.build.outputs.artifact-prefix }} architecture: ${{ matrix.architecture }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 43b7b8c462..acfa07bb38 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -25,14 +25,14 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v23.1.1 + uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v24.0.2 release: name: Release charm needs: - ci-tests - build - uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v23.1.1 + uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v24.0.2 with: channel: 14/edge artifact-prefix: ${{ needs.build.outputs.artifact-prefix }} diff --git a/.github/workflows/sync_docs.yaml b/.github/workflows/sync_docs.yaml index e91b083666..6f6feb8760 100644 --- a/.github/workflows/sync_docs.yaml +++ b/.github/workflows/sync_docs.yaml @@ -10,7 +10,7 @@ on: jobs: sync-docs: name: Sync docs from Discourse - uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v23.1.1 + uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v24.0.2 with: reviewers: a-velasco,izmalk permissions: diff --git a/poetry.lock b/poetry.lock index be1a732315..654170c80c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -31,8 +31,8 @@ pytest = "*" [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v23.1.1" -resolved_reference = "7dc172891bf274e74eef2a4d822450ca00f55188" +reference = "v24.0.2" +resolved_reference = "f92457d41a392c2549c044efe40651186db62d10" subdirectory = "python/pytest_plugins/allure_pytest_collection_report" [[package]] @@ -556,7 +556,6 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", 
hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -567,7 +566,6 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -1844,8 +1842,8 @@ develop = false [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v23.1.1" -resolved_reference = "7dc172891bf274e74eef2a4d822450ca00f55188" +reference = "v24.0.2" +resolved_reference = "f92457d41a392c2549c044efe40651186db62d10" subdirectory = "python/pytest_plugins/github_secrets" [[package]] @@ -1882,8 +1880,8 @@ pyyaml = "*" [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v23.1.1" -resolved_reference = "7dc172891bf274e74eef2a4d822450ca00f55188" +reference = "v24.0.2" +resolved_reference = "f92457d41a392c2549c044efe40651186db62d10" subdirectory = "python/pytest_plugins/pytest_operator_cache" [[package]] @@ -1901,8 +1899,8 @@ pytest = "*" [package.source] type = "git" url = "https://github.com/canonical/data-platform-workflows" -reference = "v23.1.1" -resolved_reference = "7dc172891bf274e74eef2a4d822450ca00f55188" +reference = "v24.0.2" +resolved_reference = "f92457d41a392c2549c044efe40651186db62d10" subdirectory = "python/pytest_plugins/pytest_operator_groups" [[package]] @@ -2583,4 +2581,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "4ce9e2e9d459baf9de8a132496a0ba6088db99b4388fd8852986adb84a271ea7" +content-hash = "21a1424db60f997e87ff2b587f5d56475317cc5e198355801b40493ee2df7ef0" diff --git a/pyproject.toml b/pyproject.toml index 48435fb3ae..82aa72e375 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,10 +63,10 @@ optional = true [tool.poetry.group.integration.dependencies] pytest = "^8.3.4" -pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workflows", tag = "v23.1.1", subdirectory = "python/pytest_plugins/github_secrets"} +pytest-github-secrets = {git = 
"https://github.com/canonical/data-platform-workflows", tag = "v24.0.2", subdirectory = "python/pytest_plugins/github_secrets"} pytest-operator = "^0.39.0" -pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v23.1.1", subdirectory = "python/pytest_plugins/pytest_operator_cache"} -pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v23.1.1", subdirectory = "python/pytest_plugins/pytest_operator_groups"} +pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v24.0.2", subdirectory = "python/pytest_plugins/pytest_operator_cache"} +pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v24.0.2", subdirectory = "python/pytest_plugins/pytest_operator_groups"} # renovate caret doesn't work: https://github.com/renovatebot/renovate/issues/26940 juju = "<=3.6.0.0" boto3 = "*" @@ -75,7 +75,7 @@ landscape-api-py3 = "^0.9.0" mailmanclient = "^3.3.5" psycopg2-binary = "^2.9.10" allure-pytest = "^2.13.5" -allure-pytest-collection-report = {git = "https://github.com/canonical/data-platform-workflows", tag = "v23.1.1", subdirectory = "python/pytest_plugins/allure_pytest_collection_report"} +allure-pytest-collection-report = {git = "https://github.com/canonical/data-platform-workflows", tag = "v24.0.2", subdirectory = "python/pytest_plugins/allure_pytest_collection_report"} # Testing tools configuration [tool.coverage.run] From de041439dada1015ecb2a1a94f38fb3719fcf6d8 Mon Sep 17 00:00:00 2001 From: Dragomir Penev Date: Thu, 19 Dec 2024 15:15:45 +0200 Subject: [PATCH 2/4] Bump libs --- lib/charms/grafana_agent/v0/cos_agent.py | 3 +- lib/charms/operator_libs_linux/v2/snap.py | 45 +++- lib/charms/postgresql_k8s/v0/postgresql.py | 195 +++++++++--------- .../postgresql_k8s/v0/postgresql_tls.py | 12 +- .../v2/tls_certificates.py | 4 +- 5 files changed, 144 insertions(+), 115 deletions(-) diff --git a/lib/charms/grafana_agent/v0/cos_agent.py b/lib/charms/grafana_agent/v0/cos_agent.py index cc4da25a82..1ea79a625b 100644 --- a/lib/charms/grafana_agent/v0/cos_agent.py +++ b/lib/charms/grafana_agent/v0/cos_agent.py @@ -22,7 +22,6 @@ Using the `COSAgentProvider` object only requires instantiating it, typically in the `__init__` method of your charm (the one which sends telemetry). -The constructor of `COSAgentProvider` has only one required and ten optional parameters: ```python def __init__( @@ -253,7 +252,7 @@ class _MetricsEndpointDict(TypedDict): LIBID = "dc15fa84cef84ce58155fb84f6c6213a" LIBAPI = 0 -LIBPATCH = 11 +LIBPATCH = 12 PYDEPS = ["cosl", "pydantic"] diff --git a/lib/charms/operator_libs_linux/v2/snap.py b/lib/charms/operator_libs_linux/v2/snap.py index 9d09a78d36..d14f864fd9 100644 --- a/lib/charms/operator_libs_linux/v2/snap.py +++ b/lib/charms/operator_libs_linux/v2/snap.py @@ -64,6 +64,7 @@ import socket import subprocess import sys +import time import urllib.error import urllib.parse import urllib.request @@ -83,7 +84,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 7 +LIBPATCH = 9 # Regex to locate 7-bit C1 ANSI sequences @@ -332,7 +333,7 @@ def get(self, key: Optional[str], *, typed: bool = False) -> Any: return self._snap("get", [key]).strip() - def set(self, config: Dict[str, Any], *, typed: bool = False) -> str: + def set(self, config: Dict[str, Any], *, typed: bool = False) -> None: """Set a snap configuration value. 
Args: @@ -340,11 +341,9 @@ def set(self, config: Dict[str, Any], *, typed: bool = False) -> str: typed: set to True to convert all values in the config into typed values while configuring the snap (set with typed=True). Default is not to convert. """ - if typed: - kv = [f"{key}={json.dumps(val)}" for key, val in config.items()] - return self._snap("set", ["-t"] + kv) - - return self._snap("set", [f"{key}={val}" for key, val in config.items()]) + if not typed: + config = {k: str(v) for k, v in config.items()} + self._snap_client._put_snap_conf(self._name, config) def unset(self, key) -> str: """Unset a snap configuration value. @@ -770,7 +769,33 @@ def _request( headers["Content-Type"] = "application/json" response = self._request_raw(method, path, query, headers, data) - return json.loads(response.read().decode())["result"] + response = json.loads(response.read().decode()) + if response["type"] == "async": + return self._wait(response["change"]) + return response["result"] + + def _wait(self, change_id: str, timeout=300) -> JSONType: + """Wait for an async change to complete. + + The poll time is 100 milliseconds, the same as in snap clients. + """ + deadline = time.time() + timeout + while True: + if time.time() > deadline: + raise TimeoutError(f"timeout waiting for snap change {change_id}") + response = self._request("GET", f"changes/{change_id}") + status = response["status"] + if status == "Done": + return response.get("data") + if status == "Doing" or status == "Do": + time.sleep(0.1) + continue + if status == "Wait": + logger.warning("snap change %s succeeded with status 'Wait'", change_id) + return response.get("data") + raise SnapError( + f"snap change {response.get('kind')!r} id {change_id} failed with status {status}" + ) def _request_raw( self, @@ -818,6 +843,10 @@ def get_installed_snap_apps(self, name: str) -> List: """Query the snap server for apps belonging to a named, currently installed snap.""" return self._request("GET", "apps", {"names": name, "select": "service"}) + def _put_snap_conf(self, name: str, conf: Dict[str, Any]): + """Set the configuration details for an installed snap.""" + return self._request("PUT", f"snaps/{name}/conf", body=conf) + class SnapCache(Mapping): """An abstraction to represent installed/available packages. 
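
The snap library bump above reroutes configuration writes through the snapd REST API: `Snap.set` now hands the config to `SnapClient._put_snap_conf` (a `PUT` to `snaps/<name>/conf`), and `_request` polls the resulting asynchronous change via the new `_wait` helper (100 ms poll interval, 300 s default timeout) instead of shelling out to `snap set`. A minimal sketch of a caller driving that path, assuming an already-installed snap with a hypothetical name and hypothetical config keys:

```python
# Sketch only: the snap name and config keys are illustrative assumptions,
# and the snap is assumed to be installed already.
from charms.operator_libs_linux.v2 import snap

cache = snap.SnapCache()
postgres_snap = cache["charmed-postgresql"]

# With typed=True the values keep their JSON types; without it they are
# stringified first. Either way the payload is sent to
# PUT snaps/charmed-postgresql/conf and the client blocks in _wait()
# until snapd reports the change as Done.
postgres_snap.set({"profile": "production", "experimental": True}, typed=True)
```
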
diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index 4d8d6dc30c..17adae9e61 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -21,12 +21,11 @@ import logging from collections import OrderedDict -from typing import Dict, List, Optional, Set, Tuple +from typing import Optional, Set, Tuple import psycopg2 from ops.model import Relation -from psycopg2 import sql -from psycopg2.sql import Composed +from psycopg2.sql import SQL, Composed, Identifier, Literal # The unique Charmhub library identifier, never change it LIBID = "24ee217a54e840a598ff21a079c3e678" @@ -36,7 +35,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 39 +LIBPATCH = 40 INVALID_EXTRA_USER_ROLE_BLOCKING_MESSAGE = "invalid role(s) for extra user roles" @@ -62,7 +61,7 @@ class PostgreSQLCreateDatabaseError(Exception): class PostgreSQLCreateUserError(Exception): """Exception raised when creating a user fails.""" - def __init__(self, message: str = None): + def __init__(self, message: Optional[str] = None): super().__init__(message) self.message = message @@ -109,14 +108,14 @@ def __init__( user: str, password: str, database: str, - system_users: List[str] = [], + system_users: Optional[list[str]] = None, ): self.primary_host = primary_host self.current_host = current_host self.user = user self.password = password self.database = database - self.system_users = system_users + self.system_users = system_users if system_users else [] def _configure_pgaudit(self, enable: bool) -> None: connection = None @@ -138,7 +137,7 @@ def _configure_pgaudit(self, enable: bool) -> None: connection.close() def _connect_to_database( - self, database: str = None, database_host: str = None + self, database: Optional[str] = None, database_host: Optional[str] = None ) -> psycopg2.extensions.connection: """Creates a connection to the database. @@ -162,8 +161,8 @@ def create_database( self, database: str, user: str, - plugins: List[str] = [], - client_relations: List[Relation] = [], + plugins: Optional[list[str]] = None, + client_relations: Optional[list[Relation]] = None, ) -> None: """Creates a new database and grant privileges to a user on it. @@ -173,21 +172,25 @@ def create_database( plugins: extensions to enable in the new database. client_relations: current established client relations. 
""" + plugins = plugins if plugins else [] + client_relations = client_relations if client_relations else [] try: connection = self._connect_to_database() cursor = connection.cursor() - cursor.execute(f"SELECT datname FROM pg_database WHERE datname='{database}';") + cursor.execute( + SQL("SELECT datname FROM pg_database WHERE datname={};").format(Literal(database)) + ) if cursor.fetchone() is None: - cursor.execute(sql.SQL("CREATE DATABASE {};").format(sql.Identifier(database))) + cursor.execute(SQL("CREATE DATABASE {};").format(Identifier(database))) cursor.execute( - sql.SQL("REVOKE ALL PRIVILEGES ON DATABASE {} FROM PUBLIC;").format( - sql.Identifier(database) + SQL("REVOKE ALL PRIVILEGES ON DATABASE {} FROM PUBLIC;").format( + Identifier(database) ) ) - for user_to_grant_access in [user, "admin"] + self.system_users: + for user_to_grant_access in [user, "admin", *self.system_users]: cursor.execute( - sql.SQL("GRANT ALL PRIVILEGES ON DATABASE {} TO {};").format( - sql.Identifier(database), sql.Identifier(user_to_grant_access) + SQL("GRANT ALL PRIVILEGES ON DATABASE {} TO {};").format( + Identifier(database), Identifier(user_to_grant_access) ) ) relations_accessing_this_database = 0 @@ -195,26 +198,29 @@ def create_database( for data in relation.data.values(): if data.get("database") == database: relations_accessing_this_database += 1 - with self._connect_to_database(database=database) as conn: - with conn.cursor() as curs: - curs.execute( - "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT LIKE 'pg_%' and schema_name <> 'information_schema';" - ) - schemas = [row[0] for row in curs.fetchall()] - statements = self._generate_database_privileges_statements( - relations_accessing_this_database, schemas, user - ) - for statement in statements: - curs.execute(statement) + with self._connect_to_database(database=database) as conn, conn.cursor() as curs: + curs.execute( + "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT LIKE 'pg_%' and schema_name <> 'information_schema';" + ) + schemas = [row[0] for row in curs.fetchall()] + statements = self._generate_database_privileges_statements( + relations_accessing_this_database, schemas, user + ) + for statement in statements: + curs.execute(statement) except psycopg2.Error as e: logger.error(f"Failed to create database: {e}") - raise PostgreSQLCreateDatabaseError() + raise PostgreSQLCreateDatabaseError() from e # Enable preset extensions self.enable_disable_extensions({plugin: True for plugin in plugins}, database) def create_user( - self, user: str, password: str = None, admin: bool = False, extra_user_roles: str = None + self, + user: str, + password: Optional[str] = None, + admin: bool = False, + extra_user_roles: Optional[str] = None, ) -> None: """Creates a database user. @@ -249,7 +255,9 @@ def create_user( with self._connect_to_database() as connection, connection.cursor() as cursor: # Create or update the user. 
- cursor.execute(f"SELECT TRUE FROM pg_roles WHERE rolname='{user}';") + cursor.execute( + SQL("SELECT TRUE FROM pg_roles WHERE rolname={};").format(Literal(user)) + ) if cursor.fetchone() is not None: user_definition = "ALTER ROLE {}" else: @@ -257,22 +265,20 @@ def create_user( user_definition += f"WITH {'NOLOGIN' if user == 'admin' else 'LOGIN'}{' SUPERUSER' if admin else ''} ENCRYPTED PASSWORD '{password}'{'IN ROLE admin CREATEDB' if admin_role else ''}" if privileges: user_definition += f" {' '.join(privileges)}" - cursor.execute(sql.SQL("BEGIN;")) - cursor.execute(sql.SQL("SET LOCAL log_statement = 'none';")) - cursor.execute(sql.SQL(f"{user_definition};").format(sql.Identifier(user))) - cursor.execute(sql.SQL("COMMIT;")) + cursor.execute(SQL("BEGIN;")) + cursor.execute(SQL("SET LOCAL log_statement = 'none';")) + cursor.execute(SQL(f"{user_definition};").format(Identifier(user))) + cursor.execute(SQL("COMMIT;")) # Add extra user roles to the new user. if roles: for role in roles: cursor.execute( - sql.SQL("GRANT {} TO {};").format( - sql.Identifier(role), sql.Identifier(user) - ) + SQL("GRANT {} TO {};").format(Identifier(role), Identifier(user)) ) except psycopg2.Error as e: logger.error(f"Failed to create user: {e}") - raise PostgreSQLCreateUserError() + raise PostgreSQLCreateUserError() from e def delete_user(self, user: str) -> None: """Deletes a database user. @@ -298,20 +304,22 @@ def delete_user(self, user: str) -> None: database ) as connection, connection.cursor() as cursor: cursor.execute( - sql.SQL("REASSIGN OWNED BY {} TO {};").format( - sql.Identifier(user), sql.Identifier(self.user) + SQL("REASSIGN OWNED BY {} TO {};").format( + Identifier(user), Identifier(self.user) ) ) - cursor.execute(sql.SQL("DROP OWNED BY {};").format(sql.Identifier(user))) + cursor.execute(SQL("DROP OWNED BY {};").format(Identifier(user))) # Delete the user. with self._connect_to_database() as connection, connection.cursor() as cursor: - cursor.execute(sql.SQL("DROP ROLE {};").format(sql.Identifier(user))) + cursor.execute(SQL("DROP ROLE {};").format(Identifier(user))) except psycopg2.Error as e: logger.error(f"Failed to delete user: {e}") - raise PostgreSQLDeleteUserError() + raise PostgreSQLDeleteUserError() from e - def enable_disable_extensions(self, extensions: Dict[str, bool], database: str = None) -> None: + def enable_disable_extensions( + self, extensions: dict[str, bool], database: Optional[str] = None + ) -> None: """Enables or disables a PostgreSQL extension. 
Args: @@ -353,20 +361,20 @@ def enable_disable_extensions(self, extensions: Dict[str, bool], database: str = pass except psycopg2.errors.DependentObjectsStillExist: raise - except psycopg2.Error: - raise PostgreSQLEnableDisableExtensionError() + except psycopg2.Error as e: + raise PostgreSQLEnableDisableExtensionError() from e finally: if connection is not None: connection.close() def _generate_database_privileges_statements( - self, relations_accessing_this_database: int, schemas: List[str], user: str - ) -> List[Composed]: + self, relations_accessing_this_database: int, schemas: list[str], user: str + ) -> list[Composed]: """Generates a list of databases privileges statements.""" statements = [] if relations_accessing_this_database == 1: statements.append( - sql.SQL( + SQL( """DO $$ DECLARE r RECORD; BEGIN @@ -386,44 +394,42 @@ def _generate_database_privileges_statements( END LOOP; END; $$;""" ).format( - sql.Identifier(user), - sql.Identifier(user), - sql.Identifier(user), - sql.Identifier(user), - sql.Identifier(user), - sql.Identifier(user), + Identifier(user), + Identifier(user), + Identifier(user), + Identifier(user), + Identifier(user), + Identifier(user), ) ) statements.append( - """UPDATE pg_catalog.pg_largeobject_metadata -SET lomowner = (SELECT oid FROM pg_roles WHERE rolname = '{}') -WHERE lomowner = (SELECT oid FROM pg_roles WHERE rolname = '{}');""".format(user, self.user) + SQL( + "UPDATE pg_catalog.pg_largeobject_metadata\n" + "SET lomowner = (SELECT oid FROM pg_roles WHERE rolname = {})\n" + "WHERE lomowner = (SELECT oid FROM pg_roles WHERE rolname = {});" + ).format(Literal(user), Literal(self.user)) ) for schema in schemas: statements.append( - sql.SQL("ALTER SCHEMA {} OWNER TO {};").format( - sql.Identifier(schema), sql.Identifier(user) + SQL("ALTER SCHEMA {} OWNER TO {};").format( + Identifier(schema), Identifier(user) ) ) else: for schema in schemas: - schema = sql.Identifier(schema) + schema = Identifier(schema) statements.extend([ - sql.SQL("GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} TO {};").format( - schema, sql.Identifier(user) - ), - sql.SQL("GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} TO {};").format( - schema, sql.Identifier(user) + SQL("GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} TO {};").format( + schema, Identifier(user) ), - sql.SQL("GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA {} TO {};").format( - schema, sql.Identifier(user) + SQL("GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} TO {};").format( + schema, Identifier(user) ), - sql.SQL("GRANT USAGE ON SCHEMA {} TO {};").format( - schema, sql.Identifier(user) - ), - sql.SQL("GRANT CREATE ON SCHEMA {} TO {};").format( - schema, sql.Identifier(user) + SQL("GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA {} TO {};").format( + schema, Identifier(user) ), + SQL("GRANT USAGE ON SCHEMA {} TO {};").format(schema, Identifier(user)), + SQL("GRANT CREATE ON SCHEMA {} TO {};").format(schema, Identifier(user)), ]) return statements @@ -435,7 +441,7 @@ def get_last_archived_wal(self) -> str: return cursor.fetchone()[0] except psycopg2.Error as e: logger.error(f"Failed to get PostgreSQL last archived WAL: {e}") - raise PostgreSQLGetLastArchivedWALError() + raise PostgreSQLGetLastArchivedWALError() from e def get_current_timeline(self) -> str: """Get the timeline id for the current PostgreSQL unit.""" @@ -445,7 +451,7 @@ def get_current_timeline(self) -> str: return cursor.fetchone()[0] except psycopg2.Error as e: logger.error(f"Failed to get PostgreSQL current timeline id: {e}") - raise 
PostgreSQLGetCurrentTimelineError() + raise PostgreSQLGetCurrentTimelineError() from e def get_postgresql_text_search_configs(self) -> Set[str]: """Returns the PostgreSQL available text search configs. @@ -479,10 +485,7 @@ def get_postgresql_version(self, current_host=True) -> str: Returns: PostgreSQL version number. """ - if current_host: - host = self.current_host - else: - host = None + host = self.current_host if current_host else None try: with self._connect_to_database( database_host=host @@ -492,7 +495,7 @@ def get_postgresql_version(self, current_host=True) -> str: return cursor.fetchone()[0].split(" ")[1] except psycopg2.Error as e: logger.error(f"Failed to get PostgreSQL version: {e}") - raise PostgreSQLGetPostgreSQLVersionError() + raise PostgreSQLGetPostgreSQLVersionError() from e def is_tls_enabled(self, check_current_host: bool = False) -> bool: """Returns whether TLS is enabled. @@ -527,7 +530,7 @@ def list_users(self) -> Set[str]: return {username[0] for username in usernames} except psycopg2.Error as e: logger.error(f"Failed to list PostgreSQL database users: {e}") - raise PostgreSQLListUsersError() + raise PostgreSQLListUsersError() from e def list_valid_privileges_and_roles(self) -> Tuple[Set[str], Set[str]]: """Returns two sets with valid privileges and roles. @@ -558,8 +561,8 @@ def set_up_database(self) -> None: cursor.execute("REVOKE CREATE ON SCHEMA public FROM PUBLIC;") for user in self.system_users: cursor.execute( - sql.SQL("GRANT ALL PRIVILEGES ON DATABASE postgres TO {};").format( - sql.Identifier(user) + SQL("GRANT ALL PRIVILEGES ON DATABASE postgres TO {};").format( + Identifier(user) ) ) self.create_user( @@ -569,13 +572,13 @@ def set_up_database(self) -> None: cursor.execute("GRANT CONNECT ON DATABASE postgres TO admin;") except psycopg2.Error as e: logger.error(f"Failed to set up databases: {e}") - raise PostgreSQLDatabasesSetupError() + raise PostgreSQLDatabasesSetupError() from e finally: if connection is not None: connection.close() def update_user_password( - self, username: str, password: str, database_host: str = None + self, username: str, password: str, database_host: Optional[str] = None ) -> None: """Update a user password. @@ -592,17 +595,17 @@ def update_user_password( with self._connect_to_database( database_host=database_host ) as connection, connection.cursor() as cursor: - cursor.execute(sql.SQL("BEGIN;")) - cursor.execute(sql.SQL("SET LOCAL log_statement = 'none';")) + cursor.execute(SQL("BEGIN;")) + cursor.execute(SQL("SET LOCAL log_statement = 'none';")) cursor.execute( - sql.SQL("ALTER USER {} WITH ENCRYPTED PASSWORD '" + password + "';").format( - sql.Identifier(username) + SQL("ALTER USER {} WITH ENCRYPTED PASSWORD '" + password + "';").format( + Identifier(username) ) ) - cursor.execute(sql.SQL("COMMIT;")) + cursor.execute(SQL("COMMIT;")) except psycopg2.Error as e: logger.error(f"Failed to update user password: {e}") - raise PostgreSQLUpdateUserPasswordError() + raise PostgreSQLUpdateUserPasswordError() from e finally: if connection is not None: connection.close() @@ -626,8 +629,8 @@ def is_restart_pending(self) -> bool: @staticmethod def build_postgresql_parameters( - config_options: Dict, available_memory: int, limit_memory: Optional[int] = None - ) -> Optional[Dict]: + config_options: dict, available_memory: int, limit_memory: Optional[int] = None + ) -> Optional[dict]: """Builds the PostgreSQL parameters. 
Args: @@ -692,9 +695,9 @@ def validate_date_style(self, date_style: str) -> bool: database_host=self.current_host ) as connection, connection.cursor() as cursor: cursor.execute( - sql.SQL( + SQL( "SET DateStyle to {};", - ).format(sql.Identifier(date_style)) + ).format(Identifier(date_style)) ) return True except psycopg2.Error: diff --git a/lib/charms/postgresql_k8s/v0/postgresql_tls.py b/lib/charms/postgresql_k8s/v0/postgresql_tls.py index 740a607fbc..bdc7159a9d 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql_tls.py +++ b/lib/charms/postgresql_k8s/v0/postgresql_tls.py @@ -33,7 +33,8 @@ ) from ops.charm import ActionEvent, RelationBrokenEvent from ops.framework import Object -from ops.pebble import ConnectionError, PathError, ProtocolError +from ops.pebble import ConnectionError as PebbleConnectionError +from ops.pebble import PathError, ProtocolError from tenacity import RetryError # The unique Charmhub library identifier, never change it @@ -44,7 +45,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version. -LIBPATCH = 9 +LIBPATCH = 11 logger = logging.getLogger(__name__) SCOPE = "unit" @@ -81,10 +82,7 @@ def _on_set_tls_private_key(self, event: ActionEvent) -> None: def _request_certificate(self, param: Optional[str]): """Request a certificate to TLS Certificates Operator.""" - if param is None: - key = generate_private_key() - else: - key = self._parse_tls_file(param) + key = generate_private_key() if param is None else self._parse_tls_file(param) csr = generate_csr( private_key=key, @@ -143,7 +141,7 @@ def _on_certificate_available(self, event: CertificateAvailableEvent) -> None: logger.debug("Cannot push TLS certificates at this moment") event.defer() return - except (ConnectionError, PathError, ProtocolError, RetryError) as e: + except (PebbleConnectionError, PathError, ProtocolError, RetryError) as e: logger.error("Cannot push TLS certificates: %r", e) event.defer() return diff --git a/lib/charms/tls_certificates_interface/v2/tls_certificates.py b/lib/charms/tls_certificates_interface/v2/tls_certificates.py index 9f67833ba7..c232362feb 100644 --- a/lib/charms/tls_certificates_interface/v2/tls_certificates.py +++ b/lib/charms/tls_certificates_interface/v2/tls_certificates.py @@ -307,7 +307,7 @@ def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEven # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 28 +LIBPATCH = 29 PYDEPS = ["cryptography", "jsonschema"] @@ -459,7 +459,7 @@ def restore(self, snapshot: dict): class CertificateExpiringEvent(EventBase): """Charm Event triggered when a TLS certificate is almost expired.""" - def __init__(self, handle, certificate: str, expiry: str): + def __init__(self, handle: Handle, certificate: str, expiry: str): """CertificateExpiringEvent. 
Args: From b941aac1a068a28ecabb77654a73ebf5a3ace555 Mon Sep 17 00:00:00 2001 From: Dragomir Penev Date: Thu, 19 Dec 2024 15:20:09 +0200 Subject: [PATCH 3/4] Switch to Juju 3.6 --- .github/workflows/ci.yaml | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0679e77135..f639bf35f6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -57,10 +57,8 @@ jobs: - agent: 2.9.51 # renovate: juju-agent-pin-minor libjuju: ==2.9.49.1 # renovate: latest libjuju 2 allure_on_amd64: false - - agent: 3.4.6 # renovate: juju-agent-pin-minor + - agent: 3.6.1 # renovate: juju-agent-pin-minor allure_on_amd64: true - - snap_channel: 3.6/stable - allure_on_amd64: false architecture: - amd64 include: @@ -68,11 +66,7 @@ jobs: agent: 3.4.6 # renovate: juju-agent-pin-minor allure_on_amd64: true architecture: arm64 - - juju: - snap_channel: 3.6/stable - allure_on_amd64: false - architecture: arm64 - name: Integration | ${{ matrix.juju.agent || matrix.juju.snap_channel }} | ${{ matrix.architecture }} + name: Integration | ${{ matrix.juju.agent }} | ${{ matrix.architecture }} needs: - lint - unit-test @@ -83,7 +77,6 @@ jobs: architecture: ${{ matrix.architecture }} cloud: lxd juju-agent-version: ${{ matrix.juju.agent }} - juju-snap-channel: ${{ matrix.juju.snap_channel }} libjuju-version-constraint: ${{ matrix.juju.libjuju }} _beta_allure_report: ${{ matrix.juju.allure_on_amd64 && matrix.architecture == 'amd64' }} secrets: From c85c2cc6bf5aa379d90010c970c9ee0f72d55bda Mon Sep 17 00:00:00 2001 From: Dragomir Penev Date: Thu, 19 Dec 2024 15:52:31 +0200 Subject: [PATCH 4/4] Also switch arm agent --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f639bf35f6..70a25630d2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -63,7 +63,7 @@ jobs: - amd64 include: - juju: - agent: 3.4.6 # renovate: juju-agent-pin-minor + agent: 3.6.1 # renovate: juju-agent-pin-minor allure_on_amd64: true architecture: arm64 name: Integration | ${{ matrix.juju.agent }} | ${{ matrix.architecture }}
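
Beyond the CI matrix switch to Juju 3.6, the heart of this series is patch 2/4's `postgresql.py` rework: queries that previously interpolated names and values with f-strings are rebuilt with `psycopg2.sql` composition, using `Identifier` for object names and `Literal` for data values, and the wrapped exceptions are re-raised with `from e` so the psycopg2 cause is preserved. A minimal sketch of that composition pattern, with placeholder connection details, database name, and role:

```python
# Sketch of the SQL-composition pattern used throughout the bumped library.
# The DSN, database name, and role are placeholders, not values from the patch.
import psycopg2
from psycopg2.sql import SQL, Identifier, Literal

connection = psycopg2.connect("dbname=postgres user=operator host=127.0.0.1 password=changeme")
connection.autocommit = True  # CREATE DATABASE cannot run inside a transaction block
cursor = connection.cursor()

# Literal() quotes data values and Identifier() quotes object names, so neither
# can break out of the statement the way raw f-string interpolation could.
cursor.execute(SQL("SELECT datname FROM pg_database WHERE datname={};").format(Literal("mydb")))
if cursor.fetchone() is None:
    cursor.execute(SQL("CREATE DATABASE {};").format(Identifier("mydb")))
    cursor.execute(
        SQL("GRANT ALL PRIVILEGES ON DATABASE {} TO {};").format(
            Identifier("mydb"), Identifier("relation_user")
        )
    )

cursor.close()
connection.close()
```

The same shape appears in `create_database`, `create_user`, and `_generate_database_privileges_statements` in the diff above.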