Fix ruff/flake8-bugbear issues #356

Merged · 8 commits · Mar 4, 2024
11 changes: 5 additions & 6 deletions capsul/application.py
@@ -33,7 +33,7 @@ def _is_nipype_interface(obj):
     if "nipype.interfaces.base" not in sys.modules:
         return False
     npype_base = sys.modules["nipype.interfaces.base"]
-    NipypeInterface = getattr(npype_base, "Interface")
+    NipypeInterface = npype_base.Interface
     return isinstance(obj, NipypeInterface)


@@ -48,7 +48,7 @@ def _is_nipype_interface_subclass(obj):
     if "nipype.interfaces.base" not in sys.modules:
         return False
     npype_base = sys.modules["nipype.interfaces.base"]
-    NipypeInterface = getattr(npype_base, "Interface")
+    NipypeInterface = npype_base.Interface
     return issubclass(obj, NipypeInterface)


@@ -258,7 +258,7 @@ def executable(definition, **kwargs):
         raise TypeError(
             f"Class {definition} cannot be used to create a Process "
             "because its module cannot be imported : {e}"
-        )
+        ) from e
     cls = getattr(module, object_name, None)
     if cls is not definition:
         raise TypeError(
@@ -503,8 +503,7 @@ def wrap(self, context):
                 for i in annotations
                 if i != "result" and getattr(self, i, undefined) is not undefined
             }
-            result = function(**kwargs)
-            setattr(self, "result", result)
+            self.result = function(**kwargs)

         namespace = {
             "__annotations__": annotations,
@@ -635,7 +634,7 @@ def _get_interface_class():
     if nipype is None:
         _interface = type("Interface", (object,), {})
     else:
-        _interface = getattr(nipype, "Interface")
+        _interface = nipype.Interface
     _nipype_loaded = True
     return _interface

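Most of the hunks above are flake8-bugbear B009 fixes: calling `getattr` with a literal attribute name is just a slower, less readable spelling of plain attribute access (B010 is the `setattr` counterpart, fixed later in test_activation.py); the `) from e` change is exception chaining, discussed with the test fixes below. A minimal sketch of the rule, using a hypothetical `Config` class for illustration:

```python
class Config:
    timeout = 30

cfg = Config()

# B009: literal attribute name, flagged by ruff/flake8-bugbear.
timeout = getattr(cfg, "timeout")

# Preferred: plain attribute access, identical behavior.
timeout = cfg.timeout

# getattr stays useful when the name is dynamic or a default is wanted.
attr = "timeout"
timeout = getattr(cfg, attr, 60)

# B010 counterpart: setattr with a literal name.
setattr(cfg, "timeout", 10)   # flagged
cfg.timeout = 10              # preferred
```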
7 changes: 3 additions & 4 deletions capsul/config/configuration.py
@@ -195,8 +195,7 @@ def add_module(self, module_name, allow_existing=False):
         self.config_modules.append(module_name)

         if hasattr(cls, "module_dependencies"):
-            module_dependencies = getattr(cls, "module_dependencies")
-            for dependency in module_dependencies:
+            for dependency in cls.module_dependencies:
                 self.add_module(dependency, allow_existing=True)

     def remove_module(self, module_name):
@@ -506,9 +505,9 @@ def merge_configs(self):
 def available_modules():
     module = sys.modules.get(__name__)
     mod_base = module.__name__.rsplit(".", 1)[0]
-    mod_path = getattr(module, "__file__")
+    mod_path = module.__file__
     if mod_path is None:
-        mod_path = getattr(module, "__path__")
+        mod_path = module.__path__
     mod_dir = os.path.dirname(mod_path)
     modules = []
     for p in os.listdir(mod_dir):
2 changes: 1 addition & 1 deletion capsul/engine/__init__.py
@@ -170,7 +170,7 @@ def engine_status(self):
         if engine_id:
             result["workers_count"] = database.workers_count(engine_id)
             result["connections"] = database.engine_connections(engine_id)
-            result["persistent"]: database.persistent(engine_id)
+            result["persistent"] = database.persistent(engine_id)
         return result

     def start_workers(self):
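The one-character engine/__init__.py fix deserves a note: `result["persistent"]: database.persistent(engine_id)` parses as a variable annotation, not an assignment, so the key is never set (flake8-bugbear flags this shape as B032, possible unintentional type annotation). A minimal sketch at module scope, where the semantics are easiest to observe:

```python
result = {}

# Parsed as an annotated statement: no assignment takes place.
result["persistent"]: True
print(result)   # {} ... the key was never set

# The intended assignment:
result["persistent"] = True
print(result)   # {'persistent': True}
```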
10 changes: 5 additions & 5 deletions capsul/execution_context.py
@@ -772,15 +772,15 @@ def find_temporary_to_generate(executable):
                 field.generate_temporary = False
             if isinstance(node, ProcessIteration):
                 node.process.field(field.name).generate_temporary = field.generate_temporary
-            for node, parameter in executable.get_linked_items(
+            for snode, parameter in executable.get_linked_items(
                 node, field.name, direction="links_from", in_outer_pipelines=True
             ):
                 if isinstance(node, ProcessIteration):
-                    stack.append((node.process, node.process.field(parameter)))
-                    # print('!temporaries! + ', node.process.full_name, ':', parameter)
+                    stack.append((snode.process, snode.process.field(parameter)))
+                    # print('!temporaries! + ', snode.process.full_name, ':', parameter)
                 else:
-                    stack.append((node, node.field(parameter)))
-                    # print('!temporaries! + ', node.full_name, ':', parameter)
+                    stack.append((snode, snode.field(parameter)))
+                    # print('!temporaries! + ', snode.full_name, ':', parameter)

     # print('!temporaries! parameters with temporary')
     # for n, p in temporaries:
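The `node` to `snode` rename in execution_context.py fixes a shadowing hazard: the inner `for` rebound the name `node`, which the loop header itself passes to `get_linked_items` and which the surrounding code still needs afterwards. The same concern motivates the `_` rename in python_export.py further down. A minimal sketch of the failure mode (names hypothetical):

```python
def linked(node):
    # Stand-in for executable.get_linked_items(node, ...)
    return [node + "_a", node + "_b"]

# Buggy shape: the loop variable clobbers the outer name.
node = "top"
for node in linked(node):
    pass
print(node)    # 'top_b' ... the original binding is gone

# Fixed shape: a distinct inner name keeps the outer one intact.
node = "top"
for snode in linked(node):
    pass
print(node)    # 'top'
```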
6 changes: 4 additions & 2 deletions capsul/pipeline/custom_nodes/cv_node.py
@@ -82,8 +82,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import CrossValidationFoldJob
@@ -97,6 +97,8 @@ def build_job(
         param_dict["test"] = self.test
         param_dict["nfolds"] = self.nfolds
         param_dict["fold"] = self.fold
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = CrossValidationFoldJob(
             name=name,
             referenced_input_files=referenced_input_files,
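This hunk is the first of many applying the standard fix for flake8-bugbear B006; the same pattern repeats below in loo_node.py, map_node.py, reduce_node.py, strcat_node.py, pipeline.py, and pipeline_tools.py. A default such as `[]` or `{}` is created once, at function-definition time, and is then shared by every call that relies on the default. A minimal sketch of the failure and of the `None`-sentinel idiom used throughout this PR:

```python
# Buggy: one shared list for all calls.
def collect_buggy(item, items=[]):
    items.append(item)
    return items

collect_buggy("a")   # ['a']
collect_buggy("b")   # ['a', 'b'] ... state leaks between calls

# Fixed: None sentinel, fresh list per call (the idiom used in this PR).
def collect(item, items=None):
    items = items or []
    items.append(item)
    return items

collect("a")         # ['a']
collect("b")         # ['b']
```

Note that `items or []` also replaces an explicitly passed empty list with a new one; that is harmless here, but `items if items is not None else []` is the stricter variant when an empty argument must be mutated in place.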
6 changes: 4 additions & 2 deletions capsul/pipeline/custom_nodes/loo_node.py
@@ -118,8 +118,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import LeaveOneOutJob
@@ -134,6 +134,8 @@ def build_job(
             pass
         param_dict = dict(param_dict)
         param_dict["index"] = index
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = LeaveOneOutJob(
             name=name,
             referenced_input_files=referenced_input_files,
12 changes: 8 additions & 4 deletions capsul/pipeline/custom_nodes/map_node.py
@@ -38,10 +38,12 @@ def __init__(
         self,
         pipeline,
         name,
-        input_names=["inputs"],
-        output_names=["output_%d"],
+        input_names=None,
+        output_names=None,
         input_types=None,
     ):
+        input_names = input_names or ["inputs"]
+        output_names = output_names or ["output_%d"]
         in_fields = []
         out_fields = [{"name": "lengths", "optional": True}]

@@ -153,8 +155,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import MapJob
@@ -170,6 +172,8 @@ def build_job(
         for i in range(len(value)):
             opname = output_name % i
             param_dict[opname] = getattr(self, opname)
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = MapJob(
             name=name,
             referenced_input_files=referenced_input_files,
12 changes: 8 additions & 4 deletions capsul/pipeline/custom_nodes/reduce_node.py
@@ -34,10 +34,12 @@ def __init__(
         self,
         pipeline,
         name,
-        input_names=["input_%d"],
-        output_names=["outputs"],
+        input_names=None,
+        output_names=None,
         input_types=None,
     ):
+        input_names = input_names or ["input_%d"]
+        output_names = output_names or ["outputs"]
         in_fields = [
             {"name": "lengths", "optional": True},
             {"name": "skip_empty", "optional": True},
@@ -209,8 +211,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import MapJob
@@ -227,6 +229,8 @@ def build_job(
         value = getattr(self, output_name, undefined)
         if value not in (None, undefined):
             param_dict[output_name] = value
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = MapJob(
             name=name,
             referenced_input_files=referenced_input_files,
12 changes: 8 additions & 4 deletions capsul/pipeline/custom_nodes/strcat_node.py
@@ -24,7 +24,7 @@ def __init__(
         concat_plug,
         outputs,
         make_optional=(),
-        param_types={},
+        param_types=None,
     ):
         """
         Parameters
@@ -46,6 +46,7 @@ def __init__(
             parameters types dict: {param_name: field_type_as_string}

         """
+        param_types = param_types or {}
         node_inputs = [
             dict(name=i, optional=(i in make_optional))
             for i in params
@@ -69,7 +70,8 @@ def __init__(
         self.cat_callback()
         self.set_callbacks()

-    def add_parameters(self, param_types={}):
+    def add_parameters(self, param_types=None):
+        param_types = param_types or {}
         added_fields = [self._concat_plug]
         for name in self._concat_sequence + added_fields:
             plug = self.plugs[name]
@@ -133,8 +135,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import StrCatJob
@@ -147,6 +149,8 @@ def build_job(
         # [re] build the concatenated output
         self.cat_callback()
         param_dict[self._concat_plug] = getattr(self, self._concat_plug)
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = StrCatJob(
             name=name,
             referenced_input_files=referenced_input_files,
11 changes: 6 additions & 5 deletions capsul/pipeline/pipeline.py
@@ -2460,10 +2460,10 @@ def __setitem__(self, path, value):
     def dispatch_value(self, node, name, value):
         """Propagate the value from a pipeline plug through links"""
         # print(f"!dispatch! {node.name}.{name} = {value}")
-        for node, plug in self.dispatch_plugs(node, name):
-            # print(f"!dispatch! -> {node.name}.{plug}")
-            if getattr(node, plug, undefined) != value:
-                setattr(node, plug, value)
+        for snode, plug in self.dispatch_plugs(node, name):
+            # print(f"!dispatch! -> {snode.name}.{plug}")
+            if getattr(snode, plug, undefined) != value:
+                setattr(snode, plug, value)

     def dispatch_plugs(self, node, name):
         """generator through linked plugs"""
@@ -2669,7 +2669,8 @@ def node_from_path(self, path):


 class CustomPipeline(Pipeline):
-    def __init__(self, definition="custom_pipeline", json_executable={}):
+    def __init__(self, definition="custom_pipeline", json_executable=None):
+        json_executable = json_executable or {}
         object.__setattr__(self, "json_executable", json_executable)
         super().__init__(
             definition=definition,
11 changes: 7 additions & 4 deletions capsul/pipeline/pipeline_tools.py
@@ -267,7 +267,7 @@ def pipeline_link_color(plug, link):


 def dot_graph_from_pipeline(
-    pipeline, nodes_sizes={}, use_nodes_pos=False, include_io=True, enlarge_boxes=0.0
+    pipeline, nodes_sizes=None, use_nodes_pos=False, include_io=True, enlarge_boxes=0.0
 ):
     """
     Build a graphviz/dot-compatible representation of the pipeline.
@@ -319,6 +319,7 @@ def _link_color(plug, link):
         active = plug.activated and link[3].activated
         return (0, 0, 0), style, active, link[4]

+    nodes_sizes = nodes_sizes or {}
     nodes = []
     edges = {}
     has_outputs = False
@@ -408,7 +409,7 @@ def _link_color(plug, link):


 def dot_graph_from_workflow(
-    pipeline, nodes_sizes={}, use_nodes_pos=False, enlarge_boxes=0.0
+    pipeline, nodes_sizes=None, use_nodes_pos=False, enlarge_boxes=0.0
 ):
     """
     Build a graphviz/dot-compatible representation of the pipeline workflow.
@@ -444,6 +445,7 @@ def dot_graph_from_workflow(
     :py:func:`save_dot_graph`
     """

+    nodes_sizes = nodes_sizes or {}
     graph = pipeline.workflow_graph()
     nodes = []
     edges = {}
@@ -536,7 +538,7 @@ def _str_repr(item):
 def save_dot_image(
     pipeline,
     filename,
-    nodes_sizes={},
+    nodes_sizes=None,
     use_nodes_pos=False,
     include_io=True,
     enlarge_boxes=0.0,
@@ -585,6 +587,7 @@ def save_dot_image(
     **kwargs: additional attributes for the dot graph
         like nodesep=0.1 or rankdir="TB"
     """
+    nodes_sizes = nodes_sizes or {}
     if workflow:
         dgraph = dot_graph_from_workflow(
             pipeline,
@@ -1382,7 +1385,7 @@ def __init__(self, value):
             self.value = value

         def __repr__(self):
-            return "lambda: %s" % repr(value)
+            return "lambda: %s" % repr(self.value)

     meta["default_factory"] = def_fac(field.default_factory())
     has_default = True
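The `__repr__` fix in the last hunk is an undefined-name bug (F821 in ruff terms) rather than a style issue: `value` is only a parameter of `__init__`, so `repr(value)` inside `__repr__` would raise a NameError when the factory was printed (or silently show the wrong value if an enclosing scope happened to define `value`). A sketch with a hypothetical `DefaultFactory` standing in for the PR's local `def_fac` class:

```python
class DefaultFactory:
    def __init__(self, value):
        self.value = value

    def __repr__(self):
        # The buggy version read the bare name `value`, which is not
        # in scope here, and so failed at repr() time:
        #     return "lambda: %s" % repr(value)
        return "lambda: %s" % repr(self.value)

print(DefaultFactory([1, 2]))   # lambda: [1, 2]
```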
2 changes: 1 addition & 1 deletion capsul/pipeline/python_export.py
@@ -139,7 +139,7 @@ def _write_process(process, pyf, name, enabled, skip_invalid):

     if isinstance(snode, Pipeline):
         sself_str = '%s.nodes["%s"]' % (self_str, "%s")
-        for node_name, snode in snode.nodes.items():
+        for node_name, _ in snode.nodes.items():
             scnode = cnode.nodes[node_name]

             if node_name == "":
2 changes: 1 addition & 1 deletion capsul/pipeline/test/test_activation.py
@@ -131,7 +131,7 @@ def run_unactivation_tests_2(self):
         if not app:
             app = QtGui.QApplication(sys.argv)
         pipeline = executable(MyPipeline)
-        setattr(pipeline.nodes_activation, "way11", False)
+        pipeline.nodes_activation.way11 = False
         view1 = PipelineDeveloperView(pipeline)
         view1.show()
         app.exec_()
8 changes: 4 additions & 4 deletions capsul/pipeline/test/test_complex_pipeline_activations.py
@@ -857,10 +857,10 @@ def test_complex_activations(self):
                 node_name = split[-1]
                 try:
                     node = node_pipeline.nodes[node_name]
-                except KeyError:
+                except KeyError as e:
                     raise KeyError(
                         f"Pipeline {node_pipeline.pipeline} has no node named {node_name}"
-                    )
+                    ) from e
                 try:
                     what = "activation of node {0}".format(
                         full_node_name or "main pipeline node"
@@ -876,10 +876,10 @@ def test_complex_activations(self):
                 if expected is not None:
                     got = node.enabled
                     self.assertEqual(expected, got)
-                except AssertionError:
+                except AssertionError as e:
                     raise AssertionError(
                         f"Wrong activation within ComplexPipeline with parameters {kwargs}: {what} is supposed to be {expected} but is {got}"
-                    )
+                    ) from e


 if __name__ == "__main__":
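Both test-file hunks apply flake8-bugbear B904: when raising a new exception inside an `except` block, chain it explicitly with `from` so the traceback reports the original exception as the direct cause instead of the vaguer "during handling of the above exception, another exception occurred". A minimal sketch:

```python
import traceback

def lookup(nodes, name):
    try:
        return nodes[name]
    except KeyError as e:
        # B904: re-raise with explicit chaining.
        raise KeyError(f"no node named {name!r}") from e

try:
    lookup({}, "missing")
except KeyError:
    traceback.print_exc()   # "The above exception was the direct cause ..."
```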