[DPE-5324] Increase linting rules #649

Merged · 6 commits · Oct 18, 2024
13 changes: 10 additions & 3 deletions pyproject.toml
@@ -100,13 +100,13 @@ target-version = ["py38"]
[tool.ruff]
# preview and explicit preview are enabled for CPY001
preview = true
target-version = "py38"
target-version = "py310"
Review comment from the PR author:
No libs or focal, so it is safe.
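The comment presumably refers to the charm shipping no charm libraries and no longer targeting focal's Python 3.8, so only the local source is affected. The bump also matters for the newly selected UP (pyupgrade) rules: on a py310 target, Ruff can enforce the built-in generics and PEP 604 unions that the src/backups.py changes below adopt. A minimal, hypothetical sketch of the annotation style involved (not code from this PR):

# Hypothetical helper annotated the way the UP rules prefer on a py310 target;
# the py38 spelling of the same signature needed typing.Dict/Tuple/Optional.
def settings_ok(params: dict) -> tuple[bool, str | None]:
    """Return (ok, error message); the message is None when the settings are fine."""
    if not params:
        return False, "missing S3 parameters"
    return True, None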

src = ["src", "."]
line-length = 99

[tool.ruff.lint]
explicit-preview-rules = true
select = ["A", "E", "W", "F", "C", "N", "D", "I001", "CPY001"]
select = ["A", "E", "W", "F", "C", "N", "D", "I001", "B", "CPY", "RUF", "S", "SIM", "UP", "TCH"]
extend-ignore = [
"D203",
"D204",
@@ -125,12 +125,19 @@ extend-ignore = [
ignore = ["E501", "D107"]

[tool.ruff.lint.per-file-ignores]
"tests/*" = ["D100", "D101", "D102", "D103", "D104"]
"tests/*" = [
"D100", "D101", "D102", "D103", "D104",
# Asserts
"B011",
# Disable security checks for tests
"S",
]
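For context, a hypothetical test fragment (not from this repository) showing why these ignores exist: pytest-style tests lean on bare assert statements, which bandit's S rules (for example S101) and B011 (assert False) would otherwise flag:

# Hypothetical test: asserts are idiomatic here, hence the S and B011 ignores under tests/*.
def test_backup_requires_s3_relation():
    ok, message = False, "Relation with s3-integrator charm missing"
    assert not ok       # would trip S101 outside of tests
    assert message      # likewise S101
    if not message:
        assert False, "expected an error message"  # would trip B011 outside of tests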

[tool.ruff.lint.flake8-copyright]
# Check for properly formatted copyright header in each file
author = "Canonical Ltd."
notice-rgx = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+"
min-file-size = 1
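With this configuration, CPY001 expects every checked file to start with a notice matching notice-rgx followed by the author string; the regex accepts a single year or a year range (for example 2023-2024), and min-file-size = 1 means even a near-empty module needs the header. An illustrative header in the style Canonical charms commonly use (only the first line is what the rule checks; the LICENSE line is an assumption here):

# Copyright 2024 Canonical Ltd.
# See LICENSE file for licensing details.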

[tool.ruff.lint.mccabe]
max-complexity = 10
94 changes: 39 additions & 55 deletions src/backups.py
@@ -12,8 +12,7 @@
import tempfile
from datetime import datetime, timezone
from pathlib import Path
from subprocess import PIPE, TimeoutExpired, run
from typing import Dict, List, Optional, Tuple
from subprocess import TimeoutExpired, run

import boto3 as boto3
import botocore
Expand Down Expand Up @@ -92,7 +91,7 @@ def _tls_ca_chain_filename(self) -> str:
return f"{self.charm._storage_path}/pgbackrest-tls-ca-chain.crt"
return ""

def _are_backup_settings_ok(self) -> Tuple[bool, Optional[str]]:
def _are_backup_settings_ok(self) -> tuple[bool, str | None]:
"""Validates whether backup settings are OK."""
if self.model.get_relation(self.relation_name) is None:
return (
@@ -112,17 +111,18 @@ def _can_initialise_stanza(self) -> bool:
# Don't allow stanza initialisation if this unit hasn't started the database
# yet and either hasn't joined the peer relation yet or hasn't configured TLS
# yet while other unit already has TLS enabled.
if not self.charm._patroni.member_started and (
(len(self.charm._peers.data.keys()) == 2)
or (
"tls" not in self.charm.unit_peer_data
and any("tls" in unit_data for _, unit_data in self.charm._peers.data.items())
return not (
not self.charm._patroni.member_started
and (
(len(self.charm._peers.data.keys()) == 2)
or (
"tls" not in self.charm.unit_peer_data
and any("tls" in unit_data for _, unit_data in self.charm._peers.data.items())
)
)
):
return False
return True
)
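This rewrite follows the shape of Ruff's SIM103 (needless-bool) fix: an "if condition: return False ... return True" tail collapses into a single return of the negated condition. A minimal sketch of the same transformation on a hypothetical function:

def can_initialise(started: bool, peer_count: int) -> bool:
    # Before (the pattern SIM103 flags):
    #     if not started and peer_count == 2:
    #         return False
    #     return True
    return not (not started and peer_count == 2)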

def _can_unit_perform_backup(self) -> Tuple[bool, Optional[str]]:
def _can_unit_perform_backup(self) -> tuple[bool, str | None]:
"""Validates whether this unit can perform a backup."""
if self.charm.is_blocked:
return False, "Unit is in a blocking state"
@@ -154,7 +154,7 @@ def _can_unit_perform_backup(self) -> Tuple[bool, Optional[str]]:

return self._are_backup_settings_ok()

def can_use_s3_repository(self) -> Tuple[bool, Optional[str]]:
def can_use_s3_repository(self) -> tuple[bool, str | None]:
"""Returns whether the charm was configured to use another cluster repository."""
# Prevent creating backups and storing in another cluster repository.
try:
@@ -166,10 +166,8 @@ def can_use_s3_repository(self) -> Tuple[bool, Optional[str]]:
# Raise an error if the connection timeouts, so the user has the possibility to
# fix network issues and call juju resolve to re-trigger the hook that calls
# this method.
logger.error(
f"error: {str(e)} - please fix the error and call juju resolve on this unit"
)
raise TimeoutError
logger.error(f"error: {e!s} - please fix the error and call juju resolve on this unit")
raise TimeoutError from e
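Two of the newly selected families meet in this except block: RUF010 prefers the !s conversion inside an f-string over calling str(), and B904 asks for explicit chaining (raise ... from e) so the original traceback is preserved. A minimal, hypothetical sketch of both fixes together:

import logging

logger = logging.getLogger(__name__)

def check_stanza(run_check) -> None:
    try:
        run_check()
    except OSError as e:
        # RUF010: use the !s conversion instead of str(e) inside the f-string.
        logger.error(f"error: {e!s} - please fix the error and retry")
        # B904: chain the re-raised exception to keep the original context.
        raise TimeoutError from e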

else:
if return_code != 0:
@@ -184,11 +182,11 @@ def can_use_s3_repository(self) -> Tuple[bool, Optional[str]]:
])
if return_code != 0:
raise Exception(error)
system_identifier_from_instance = [
system_identifier_from_instance = next(
line
for line in system_identifier_from_instance.splitlines()
if "Database system identifier" in line
][0].split(" ")[-1]
).split(" ")[-1]
system_identifier_from_stanza = str(stanza.get("db")[0]["system-id"])
if system_identifier_from_instance != system_identifier_from_stanza or stanza.get(
"name"
@@ -207,7 +205,7 @@ def _change_connectivity_to_database(self, connectivity: bool) -> None:
self.charm.unit_peer_data.update({"connectivity": "on" if connectivity else "off"})
self.charm.update_config()

def _construct_endpoint(self, s3_parameters: Dict) -> str:
def _construct_endpoint(self, s3_parameters: dict) -> str:
"""Construct the S3 service endpoint using the region.

This is needed when the provided endpoint is from AWS, and it doesn't contain the region.
@@ -260,9 +258,7 @@ def _create_bucket_if_not_exists(self) -> None:
# Re-raise the error if the connection timeouts, so the user has the possibility to
# fix network issues and call juju resolve to re-trigger the hook that calls
# this method.
logger.error(
f"error: {str(e)} - please fix the error and call juju resolve on this unit"
)
logger.error(f"error: {e!s} - please fix the error and call juju resolve on this unit")
raise e
except ClientError:
logger.warning("Bucket %s doesn't exist or you don't have access to it.", bucket_name)
@@ -286,17 +282,17 @@ def _empty_data_files(self) -> bool:
if path.exists() and path.is_dir():
shutil.rmtree(path)
except OSError as e:
logger.warning(f"Failed to remove contents of the data directory with error: {str(e)}")
logger.warning(f"Failed to remove contents of the data directory with error: {e!s}")
return False

return True

def _execute_command(
self,
command: List[str],
command_input: bytes = None,
timeout: int = None,
) -> Tuple[int, str, str]:
command: list[str],
command_input: bytes | None = None,
timeout: int | None = None,
) -> tuple[int, str, str]:
"""Execute a command in the workload container."""

def demote():
@@ -308,11 +304,11 @@ def result():

return result

process = run(
# Input is generated by the charm
process = run( # noqa: S603
command,
input=command_input,
stdout=PIPE,
stderr=PIPE,
capture_output=True,
preexec_fn=demote(),
timeout=timeout,
)
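capture_output=True is the documented shorthand for passing stdout=PIPE and stderr=PIPE, which is why the PIPE import disappears from the top of the file, and the noqa: S603 acknowledges bandit's subprocess check for a command the charm assembles itself. A reduced, illustrative sketch of the same call shape (the command is a stand-in):

from subprocess import run

process = run(  # noqa: S603  # command is built by the code, not taken from user input
    ["python3", "--version"],  # stand-in command
    capture_output=True,       # shorthand for stdout=PIPE, stderr=PIPE
    timeout=30,
)
print(process.returncode, process.stdout.decode().strip())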
@@ -349,17 +345,7 @@ def _format_backup_list(self, backup_list) -> str:
path,
) in backup_list:
backups.append(
"{:<20s} | {:<19s} | {:<8s} | {:<20s} | {:<23s} | {:<20s} | {:<20s} | {:<8s} | {:s}".format(
backup_id,
backup_action,
backup_status,
reference,
lsn_start_stop,
start,
stop,
backup_timeline,
path,
)
f"{backup_id:<20s} | {backup_action:<19s} | {backup_status:<8s} | {reference:<20s} | {lsn_start_stop:<23s} | {start:<20s} | {stop:<20s} | {backup_timeline:<8s} | {path:s}"
)
return "\n".join(backups)

@@ -414,7 +400,7 @@ def _generate_backup_list_output(self) -> str:
backup_path,
))

for timeline, (timeline_stanza, timeline_id) in self._list_timelines().items():
for timeline, (_, timeline_id) in self._list_timelines().items():
backup_list.append((
timeline,
"restore",
@@ -542,7 +528,7 @@ def _parse_psql_timestamp(self, timestamp: str) -> datetime:
dt = dt.astimezone(tz=timezone.utc)
return dt.replace(tzinfo=None)

def _parse_backup_id(self, label) -> Tuple[str, str]:
def _parse_backup_id(self, label) -> tuple[str, str]:
"""Parse backup ID as a timestamp and its type."""
if label[-1] == "F":
timestamp = label
@@ -751,7 +737,7 @@ def _on_s3_credential_gone(self, _) -> None:
if self.charm.is_blocked and self.charm.unit.status.message in S3_BLOCK_MESSAGES:
self.charm.unit.status = ActiveStatus()

def _on_create_backup_action(self, event) -> None: # noqa: C901
def _on_create_backup_action(self, event) -> None:
"""Request that pgBackRest creates a backup."""
backup_type = event.params.get("type", "full")
if backup_type not in BACKUP_TYPE_OVERRIDES:
@@ -788,7 +774,7 @@ def _on_create_backup_action(self, event) -> None: # noqa: C901
Model Name: {self.model.name}
Application Name: {self.model.app.name}
Unit Name: {self.charm.unit.name}
Juju Version: {str(juju_version)}
Juju Version: {juju_version!s}
"""
if not self._upload_content_to_s3(
metadata,
@@ -826,7 +812,7 @@ def _on_create_backup_action(self, event) -> None: # noqa: C901
def _run_backup(
self,
event: ActionEvent,
s3_parameters: Dict,
s3_parameters: dict,
datetime_backup_requested: str,
backup_type: str,
) -> None:
@@ -920,7 +906,7 @@ def _on_list_backups_action(self, event) -> None:
event.set_results({"backups": formatted_list})
except ListBackupsError as e:
logger.exception(e)
event.fail(f"Failed to list PostgreSQL backups with error: {str(e)}")
event.fail(f"Failed to list PostgreSQL backups with error: {e!s}")

def _on_restore_action(self, event): # noqa: C901
"""Request that pgBackRest restores a backup."""
@@ -940,10 +926,8 @@ def _on_restore_action(self, event): # noqa: C901
try:
backups = self._list_backups(show_failed=False)
timelines = self._list_timelines()
is_backup_id_real = backup_id and backup_id in backups.keys()
is_backup_id_timeline = (
backup_id and not is_backup_id_real and backup_id in timelines.keys()
)
is_backup_id_real = backup_id and backup_id in backups
is_backup_id_timeline = backup_id and not is_backup_id_real and backup_id in timelines
if backup_id and not is_backup_id_real and not is_backup_id_timeline:
error_message = f"Invalid backup-id: {backup_id}"
logger.error(f"Restore failed: {error_message}")
@@ -1145,7 +1129,7 @@ def _render_pgbackrest_conf_file(self) -> bool:
self._tls_ca_chain_filename, "\n".join(s3_parameters["tls-ca-chain"]), 0o644
)

with open("templates/pgbackrest.conf.j2", "r") as file:
with open("templates/pgbackrest.conf.j2") as file:
template = Template(file.read())
# Render the template file with the correct values.
rendered = template.render(
@@ -1177,7 +1161,7 @@ def _restart_database(self) -> None:
self.charm.update_config()
self.charm._patroni.start_patroni()

def _retrieve_s3_parameters(self) -> Tuple[Dict, List[str]]:
def _retrieve_s3_parameters(self) -> tuple[dict, list[str]]:
"""Retrieve S3 parameters from the S3 integrator relation."""
s3_parameters = self.s3_client.get_s3_connection_info()
required_parameters = [
@@ -1254,7 +1238,7 @@ def _upload_content_to_s3(
self,
content: str,
s3_path: str,
s3_parameters: Dict,
s3_parameters: dict,
) -> bool:
"""Uploads the provided contents to the provided S3 bucket.
