Add RUF (ruff specific rules) to linting rules
larsevj committed Sep 16, 2024
1 parent d69bbe3 commit 2ad7010
Showing 71 changed files with 197 additions and 189 deletions.
10 changes: 9 additions & 1 deletion pyproject.toml
@@ -168,6 +168,7 @@ select = [
"NPY", # numpy specific rules
"C4", # flake8-comprehensions
"ASYNC", # flake8-async
"RUF", # ruff specific rules
]
preview = true
ignore = ["PLW2901", # redefined-loop-name
@@ -185,11 +186,18 @@ ignore = ["PLW2901", # redefined-loop-name
"PLW3201", # bad-dunder-method-name
]

# Allow EN DASH (U+2013)
allowed-confusables = ["–"]

[tool.ruff.lint.extend-per-file-ignores]
"tests/*" = [
"PLW0603" # global-statement
"PLW0603", # global-statement
"RUF029", # unused-async
"RUF018", # assignment-in-assert
"RUF006", # asyncio-dangling-task
]
"src/ert/dark_storage/json_schema/__init__.py" = ["F401"]
"src/ert/dark_storage/*" = ["RUF029"] # unused-async

[tool.ruff.lint.pylint]
max-args = 20
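The three new test-only ignores relax rules that commonly fire in test code. As a minimal sketch (illustrative only, not part of this commit), the patterns each ignored rule would otherwise flag look roughly like this:

import asyncio


async def test_nothing_awaited():
    # RUF029 (unused-async): declared async but never awaits anything.
    assert 1 + 1 == 2


def test_walrus_in_assert():
    # RUF018 (assignment-in-assert): the binding is lost if asserts are
    # stripped with `python -O`.
    assert (values := [1, 2, 3])
    print(values)


async def fire_and_forget():
    # RUF006 (asyncio-dangling-task): no reference to the task is kept,
    # so it may be garbage-collected before it finishes.
    asyncio.create_task(asyncio.sleep(0.1))
    await asyncio.sleep(0.2)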
6 changes: 3 additions & 3 deletions src/_ert/forward_model_runner/client.py
@@ -98,7 +98,7 @@ async def _send(self, msg: AnyStr) -> None:
except ConnectionClosedOK as exception:
_error_msg = (
f"Connection closed received from the server {self.url}! "
f" Exception from {type(exception)}: {str(exception)}"
f" Exception from {type(exception)}: {exception!s}"
)
raise ClientConnectionClosedOK(_error_msg) from exception
except (
@@ -112,15 +112,15 @@ async def _send(self, msg: AnyStr) -> None:
f"Not able to establish the "
f"websocket connection {self.url}! Max retries reached!"
" Check for firewall issues."
f" Exception from {type(exception)}: {str(exception)}"
f" Exception from {type(exception)}: {exception!s}"
)
raise ClientConnectionError(_error_msg) from exception
except ConnectionClosedError as exception:
if retry == self._max_retries:
_error_msg = (
f"Not been able to send the event"
f" to {self.url}! Max retries reached!"
f" Exception from {type(exception)}: {str(exception)}"
f" Exception from {type(exception)}: {exception!s}"
)
raise ClientConnectionError(_error_msg) from exception
await asyncio.sleep(0.2 + self._timeout_multiplier * retry)
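The str(exception) → exception!s rewrites in this file correspond to RUF010 (explicit f-string type conversion), which prefers the conversion flag over an explicit str() call inside an f-string. A standalone sketch with a hypothetical exception value:

exc = ValueError("connection refused")  # hypothetical exception

# Flagged by RUF010: explicit str() call inside the f-string.
message_old = f"Exception from {type(exc)}: {str(exc)}"

# Equivalent, using the !s conversion flag.
message_new = f"Exception from {type(exc)}: {exc!s}"

assert message_old == message_new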
2 changes: 1 addition & 1 deletion src/_ert/forward_model_runner/job.py
@@ -82,7 +82,7 @@ def __init__(self, job_data, index, sleep_interval=1):
self.std_err = job_data.get("stderr")
self.std_out = job_data.get("stdout")

def run(self): # noqa: PLR0912, PLR0915
def run(self):
start_message = Start(self)

errors = self._check_job_files()
2 changes: 1 addition & 1 deletion src/_ert/threading.py
@@ -77,7 +77,7 @@ def _raise_on_main_thread(exception: BaseException) -> None:


def _handler(signum: int, frametype: FrameType | None) -> None:
global _current_exception # noqa: PLW0603
global _current_exception
if not _current_exception:
return
current_exception, _current_exception = _current_exception, None
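The suppression comments dropped here and in the previous file (# noqa: PLW0603, # noqa: PLR0912, PLR0915) are most likely reported by RUF100 (unused-noqa) once RUF is enabled: with the corresponding rules now ignored elsewhere in the configuration, the directives no longer suppress anything, so Ruff asks for their removal. A toy example of the pattern RUF100 flags, with hypothetical names:

import os  # noqa: F401


def current_directory() -> str:
    # `os` is clearly used, so the F401 (unused-import) suppression above
    # suppresses nothing; RUF100 reports it as an unused noqa directive.
    return os.getcwd()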
4 changes: 1 addition & 3 deletions src/ert/cli/main.py
@@ -123,9 +123,7 @@ def run_cli(args: Namespace, plugin_manager: Optional[ErtPluginManager] = None)
with contextlib.ExitStack() as exit_stack:
out: TextIO
if args.disable_monitoring:
out = exit_stack.enter_context(
open(os.devnull, "w", encoding="utf-8") # noqa
)
out = exit_stack.enter_context(open(os.devnull, "w", encoding="utf-8"))
else:
out = sys.stderr
monitor = Monitor(out=out, color_always=args.color_always)
13 changes: 1 addition & 12 deletions src/ert/config/_read_summary.py
@@ -334,18 +334,7 @@ def _read_spec(

with open(spec, mode) as fp:
for entry in resfo.lazy_read(fp, format):
if all(
p is not None
for p in (
[
date,
n,
nx,
ny,
]
+ list(arrays.values())
)
):
if all(p is not None for p in [date, n, nx, ny, *arrays.values()]):
break
kw = entry.read_keyword()
if kw in arrays:
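Replacing list concatenation with an unpacked literal is the autofix for RUF005 (collection-literal-concatenation). The same transformation on standalone, hypothetical data:

arrays = {"FOPR": 1.0, "FOPT": 2.0}  # hypothetical keyword values

# Flagged by RUF005: concatenating a list literal with another sequence.
params_old = ["date", "n", "nx", "ny"] + list(arrays.values())

# Preferred: unpack the extra values directly inside the literal.
params_new = ["date", "n", "nx", "ny", *arrays.values()]

assert params_old == params_new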
4 changes: 2 additions & 2 deletions src/ert/config/ert_config.py
@@ -542,7 +542,7 @@ def _create_list_of_forward_model_steps_to_run(
except ForwardModelStepValidationError as err:
errors.append(
ConfigValidationError.with_context(
f"Forward model step pre-experiment validation failed: {str(err)}",
f"Forward model step pre-experiment validation failed: {err!s}",
context=fm_step.name,
),
)
@@ -694,7 +694,7 @@ def handle_default(fm_step: ForwardModelStep, arg: str) -> str:
job_list_errors.append(
ErrorInfo(
message=f"Validation failed for "
f"forward model step {fm_step.name}: {str(exc)}"
f"forward model step {fm_step.name}: {exc!s}"
).set_context(fm_step.name)
)

4 changes: 2 additions & 2 deletions src/ert/config/ert_plugin.py
@@ -11,11 +11,11 @@ class CancelPluginException(Exception):


class ErtPlugin(ErtScript, ABC):
def getArguments(self, args: List[Any]) -> List[Any]: # noqa: PLR6301
def getArguments(self, args: List[Any]) -> List[Any]:
return []

def getName(self) -> str:
return str(self.__class__)

def getDescription(self) -> str: # noqa: PLR6301
def getDescription(self) -> str:
return "No description provided!"
2 changes: 1 addition & 1 deletion src/ert/config/ert_script.py
@@ -158,7 +158,7 @@ def initializeAndRun(
return uw.args[0]
except Exception as e:
full_trace = "".join(traceback.format_exception(*sys.exc_info()))
self.output_stack_trace(f"{str(e)}\n{full_trace}")
self.output_stack_trace(f"{e!s}\n{full_trace}")
return None
finally:
self.cleanup()
35 changes: 18 additions & 17 deletions src/ert/config/forward_model_step.py
@@ -1,9 +1,10 @@
from __future__ import annotations

import logging
from abc import abstractmethod
from dataclasses import dataclass, field
from typing import (
Dict,
List,
ClassVar,
Literal,
Optional,
TypedDict,
@@ -58,16 +59,16 @@ class ForwardModelStepJSON(TypedDict):
"""

name: str
executable: List[str]
executable: list[str]
target_file: str
error_file: str
start_file: str
stdout: str
stderr: str
stdin: str
argList: List[str]
environment: Dict[str, str]
exec_env: Dict[str, str]
argList: list[str]
environment: dict[str, str]
exec_env: dict[str, str]
max_running_minutes: int


@@ -79,9 +80,9 @@ class ForwardModelStepOptions(TypedDict, total=False):
target_file: NotRequired[str]
error_file: NotRequired[str]
max_running_minutes: NotRequired[int]
environment: NotRequired[Dict[str, Union[str, int]]]
exec_env: NotRequired[Dict[str, Union[str, int]]]
default_mapping: NotRequired[Dict[str, Union[str, int]]]
environment: NotRequired[dict[str, Union[str, int]]]
exec_env: NotRequired[dict[str, Union[str, int]]]
default_mapping: NotRequired[dict[str, Union[str, int]]]


@dataclass
@@ -153,16 +154,16 @@ class ForwardModelStep:
max_running_minutes: Optional[int] = None
min_arg: Optional[int] = None
max_arg: Optional[int] = None
arglist: List[str] = field(default_factory=list)
required_keywords: List[str] = field(default_factory=list)
arg_types: List[SchemaItemType] = field(default_factory=list)
environment: Dict[str, Union[int, str]] = field(default_factory=dict)
exec_env: Dict[str, Union[int, str]] = field(default_factory=dict)
default_mapping: Dict[str, Union[int, str]] = field(default_factory=dict)
arglist: list[str] = field(default_factory=list)
required_keywords: list[str] = field(default_factory=list)
arg_types: list[SchemaItemType] = field(default_factory=list)
environment: dict[str, Union[int, str]] = field(default_factory=dict)
exec_env: dict[str, Union[int, str]] = field(default_factory=dict)
default_mapping: dict[str, Union[int, str]] = field(default_factory=dict)
private_args: SubstitutionList = field(default_factory=SubstitutionList)
help_text: str = ""

default_env = {
default_env: ClassVar[dict[str, str]] = {
"_ERT_ITERATION_NUMBER": "<ITER>",
"_ERT_REALIZATION_NUMBER": "<IENS>",
"_ERT_RUNPATH": "<RUNPATH>",
@@ -210,7 +211,7 @@ def __post_init__(self) -> None:

class ForwardModelStepPlugin(ForwardModelStep):
def __init__(
self, name: str, command: List[str], **kwargs: Unpack[ForwardModelStepOptions]
self, name: str, command: list[str], **kwargs: Unpack[ForwardModelStepOptions]
):
if not kwargs:
kwargs = ForwardModelStepOptions()
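Annotating default_env with ClassVar addresses RUF012 (mutable-class-default): a mutable value at class level on a dataclass should either be a per-instance field built with default_factory or be explicitly marked as a shared class variable. A minimal sketch with a hypothetical class name:

from dataclasses import dataclass, field
from typing import ClassVar


@dataclass
class Step:
    # Per-instance mutable default: every instance gets its own dict.
    environment: dict[str, str] = field(default_factory=dict)

    # Shared, class-level constant; ClassVar keeps it out of __init__
    # and satisfies RUF012.
    default_env: ClassVar[dict[str, str]] = {"_ERT_RUNPATH": "<RUNPATH>"}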
2 changes: 1 addition & 1 deletion src/ert/config/gen_data_config.py
@@ -142,7 +142,7 @@ def _read_file(filename: Path, report_step: int) -> xr.Dataset:
errors.append(str(err))
else:
for report_step in report_steps:
filename = input_file % report_step # noqa
filename = input_file % report_step
try:
datasets_per_report_step.append(
_read_file(_run_path / filename, report_step)
2 changes: 1 addition & 1 deletion src/ert/config/gen_kw_config.py
@@ -385,7 +385,7 @@ def transform(self, array: npt.ArrayLike) -> npt.NDArray[np.float64]:
def _values_from_file(
realization: int, name_format: str, keys: List[str]
) -> npt.NDArray[np.double]:
file_name = name_format % realization # noqa
file_name = name_format % realization
df = pd.read_csv(file_name, sep=r"\s+", header=None)
# This means we have a key: value mapping in the
# file otherwise it is just a list of values
2 changes: 1 addition & 1 deletion src/ert/config/observations.py
@@ -33,7 +33,7 @@

def history_key(key: str) -> str:
keyword, *rest = key.split(":")
return ":".join([keyword + "H"] + rest)
return ":".join([keyword + "H", *rest])


@dataclass
2 changes: 1 addition & 1 deletion src/ert/config/refcase.py
@@ -35,7 +35,7 @@ def __eq__(self, other: object) -> bool:

@property
def all_dates(self) -> List[datetime]:
return [self.start_date] + list(self.dates)
return [self.start_date, *self.dates]

@classmethod
def from_config_dict(cls, config_dict: ConfigDict) -> Optional["Refcase"]:
2 changes: 1 addition & 1 deletion src/ert/config/workflow_job.py
@@ -52,7 +52,7 @@ def __post_init__(self) -> None:

# Bare Exception here as we have no control
# of exceptions in the loaded ErtScript
except Exception as err: # noqa
except Exception as err:
raise ErtScriptLoadFailure(
f"Failed to load {self.name}: {err}"
) from err
4 changes: 2 additions & 2 deletions src/ert/dark_storage/enkf.py
@@ -18,11 +18,11 @@


def get_storage() -> Storage:
global _storage # noqa: PLW0603e
global _storage
if _storage is None:
try:
return (_storage := open_storage(os.environ["ERT_STORAGE_ENS_PATH"]))
except RuntimeError as err:
raise InternalServerError(f"{str(err)}") from err
raise InternalServerError(f"{err!s}") from err
_storage.refresh()
return _storage
4 changes: 2 additions & 2 deletions src/ert/data/_measured_data.py
@@ -53,7 +53,7 @@ def remove_failed_realizations(self) -> None:
standard deviations as-is."""
pre_index = self.data.index
post_index = list(self.data.dropna(axis=0, how="all").index)
drop_index = set(pre_index) - set(post_index + ["STD", "OBS"])
drop_index = set(pre_index) - {*post_index, "STD", "OBS"}
self._set_data(self.data.drop(index=drop_index))

def get_simulated_data(self) -> pd.DataFrame:
@@ -146,7 +146,7 @@ def _get_data(
measured_data.append(
pd.DataFrame(
data,
index=("OBS", "STD") + tuple(ds.realization.values),
index=("OBS", "STD", *ds.realization.values),
columns=pd.MultiIndex.from_tuples(
index_vals,
names=[None, "key_index", "data_index"],
2 changes: 1 addition & 1 deletion src/ert/ensemble_evaluator/__init__.py
@@ -11,13 +11,13 @@
"EndEvent",
"Ensemble",
"EnsembleEvaluator",
"EnsembleSnapshot",
"EvaluatorServerConfig",
"FMStepSnapshot",
"FullSnapshotEvent",
"Monitor",
"Realization",
"RealizationSnapshot",
"EnsembleSnapshot",
"SnapshotUpdateEvent",
"wait_for_evaluator",
]
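Moving "EnsembleSnapshot" up in __all__ keeps the export list alphabetically sorted, which is what RUF022 (unsorted __all__) checks for, assuming that rule is among those enabled here. A toy illustration:

# Flagged by RUF022: "EndEvent" sorts before "Monitor".
__all__ = ["Monitor", "EndEvent"]

# Accepted: alphabetically sorted.
__all__ = ["EndEvent", "Monitor"]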
2 changes: 1 addition & 1 deletion src/ert/ensemble_evaluator/_ensemble.py
@@ -153,7 +153,7 @@ def update_snapshot(self, events: Sequence[Event]) -> EnsembleSnapshot:
self.status = self._status_tracker.update_state(self.snapshot.status)
return snapshot_mutate_event

async def send_event( # noqa: PLR6301
async def send_event(
self,
url: str,
event: Event,
2 changes: 1 addition & 1 deletion src/ert/ensemble_evaluator/_wait_for_evaluator.py
@@ -42,7 +42,7 @@ async def wait_for_evaluator(
token: Optional[str] = None,
cert: Optional[Union[str, bytes]] = None,
healthcheck_endpoint: str = "/healthcheck",
timeout: Optional[float] = None, # noqa: ASYNC109
timeout: Optional[float] = None,
connection_timeout: float = 2,
) -> None:
if timeout is None:
2 changes: 1 addition & 1 deletion src/ert/ensemble_evaluator/evaluator.py
@@ -262,7 +262,7 @@ async def handle_dispatch(self, websocket: WebSocketServerProtocol) -> None:
# * job being killed due to MAX_RUNTIME
# * job being killed by user
logger.error(
f"a dispatcher abruptly closed a websocket: {str(connection_error)}"
f"a dispatcher abruptly closed a websocket: {connection_error!s}"
)

async def forward_checksum(self, event: Event) -> None:
Expand Down
4 changes: 2 additions & 2 deletions src/ert/ensemble_evaluator/snapshot.py
@@ -223,7 +223,7 @@ def get_real(self, real_id: RealId) -> "RealizationSnapshot":
return self._realization_snapshots[real_id]

def get_fm_step(self, real_id: RealId, fm_step_id: FmStepId) -> "FMStepSnapshot":
return self._fm_step_snapshots[(real_id, fm_step_id)].copy()
return self._fm_step_snapshots[real_id, fm_step_id].copy()

def get_successful_realizations(self) -> typing.List[int]:
return [
@@ -373,7 +373,7 @@ def update_fm_step(
fm_step_id: str,
fm_step: "FMStepSnapshot",
) -> "EnsembleSnapshot":
self._fm_step_snapshots[(real_id, fm_step_id)].update(fm_step)
self._fm_step_snapshots[real_id, fm_step_id].update(fm_step)
return self


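Dropping the parentheses around the key in _fm_step_snapshots[(real_id, fm_step_id)] is behaviour-preserving: a bare comma in a subscript already forms a tuple. This looks like Ruff's preview rule against parenthesized tuples in subscripts (RUF031 — an assumption, since the rule code is not shown in this diff). A standalone sketch with hypothetical data:

snapshots = {("real-0", "step-1"): {"status": "running"}}  # hypothetical data

# Both subscripts build the same tuple key; the parentheses are redundant.
with_parens = snapshots[("real-0", "step-1")]
without_parens = snapshots["real-0", "step-1"]

assert with_parens is without_parens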
12 changes: 6 additions & 6 deletions src/ert/gui/ertwidgets/analysismodulevariablespanel.py
@@ -59,11 +59,11 @@ def __init__(self, analysis_module: AnalysisModule, ensemble_size: int):
analysis_module.__getattribute__(variable_name),
cast(
float,
[v for v in metadata.metadata if isinstance(v, Ge)][0].ge,
next(v for v in metadata.metadata if isinstance(v, Ge)).ge,
),
cast(
float,
[v for v in metadata.metadata if isinstance(v, Le)][0].le,
next(v for v in metadata.metadata if isinstance(v, Le)).le,
),
0.1,
),
@@ -92,9 +92,9 @@ def __init__(self, analysis_module: AnalysisModule, ensemble_size: int):
self.truncation_spinner = self.createDoubleSpinBox(
var_name,
analysis_module.enkf_truncation,
cast(float, [v for v in metadata.metadata if isinstance(v, Gt)][0].gt)
cast(float, next(v for v in metadata.metadata if isinstance(v, Gt)).gt)
+ 0.001,
cast(float, [v for v in metadata.metadata if isinstance(v, Le)][0].le),
cast(float, next(v for v in metadata.metadata if isinstance(v, Le)).le),
0.01,
)
self.truncation_spinner.setEnabled(False)
@@ -127,8 +127,8 @@ def __init__(self, analysis_module: AnalysisModule, ensemble_size: int):
self.local_spinner = self.createDoubleSpinBox(
var_name,
analysis_module.correlation_threshold(ensemble_size),
cast(float, [v for v in metadata.metadata if isinstance(v, Ge)][0].ge),
cast(float, [v for v in metadata.metadata if isinstance(v, Le)][0].le),
cast(float, next(v for v in metadata.metadata if isinstance(v, Ge)).ge),
cast(float, next(v for v in metadata.metadata if isinstance(v, Le)).le),
0.1,
)
self.local_spinner.setObjectName("localization_threshold")
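The [...][0] → next(...) rewrites follow RUF015 (unnecessary iterable allocation for first element): building the full filtered list just to read its first item is replaced by next() over a generator, which stops at the first match. A standalone sketch with hypothetical metadata; note that next() raises StopIteration rather than IndexError when nothing matches:

metadata = ["mean", 0.1, "std", 2.0]  # hypothetical mixed metadata

# Flagged by RUF015: materializes every match just to read the first one.
first_float_old = [v for v in metadata if isinstance(v, float)][0]

# Preferred: a generator expression stops as soon as a match is found.
first_float_new = next(v for v in metadata if isinstance(v, float))

assert first_float_old == first_float_new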
[Remaining changed files not shown in this view.]
