diff --git a/capsul/application.py b/capsul/application.py
index 08df5aaa..3f9f0c66 100644
--- a/capsul/application.py
+++ b/capsul/application.py
@@ -33,7 +33,7 @@ def _is_nipype_interface(obj):
     if "nipype.interfaces.base" not in sys.modules:
         return False
     npype_base = sys.modules["nipype.interfaces.base"]
-    NipypeInterface = getattr(npype_base, "Interface")
+    NipypeInterface = npype_base.Interface
     return isinstance(obj, NipypeInterface)
 
 
@@ -48,7 +48,7 @@ def _is_nipype_interface_subclass(obj):
     if "nipype.interfaces.base" not in sys.modules:
         return False
     npype_base = sys.modules["nipype.interfaces.base"]
-    NipypeInterface = getattr(npype_base, "Interface")
+    NipypeInterface = npype_base.Interface
     return issubclass(obj, NipypeInterface)
 
 
@@ -258,7 +258,7 @@ def executable(definition, **kwargs):
         raise TypeError(
             f"Class {definition} cannot be used to create a Process "
-            "because its module cannot be imported : {e}"
-        )
+            f"because its module cannot be imported: {e}"
+        ) from e
     cls = getattr(module, object_name, None)
     if cls is not definition:
         raise TypeError(
@@ -503,8 +503,7 @@ def wrap(self, context):
             for i in annotations
             if i != "result" and getattr(self, i, undefined) is not undefined
         }
-        result = function(**kwargs)
-        setattr(self, "result", result)
+        self.result = function(**kwargs)
 
     namespace = {
         "__annotations__": annotations,
@@ -635,7 +634,7 @@ def _get_interface_class():
     if nipype is None:
         _interface = type("Interface", (object,), {})
     else:
-        _interface = getattr(nipype, "Interface")
+        _interface = nipype.Interface
     _nipype_loaded = True
     return _interface
 
diff --git a/capsul/config/configuration.py b/capsul/config/configuration.py
index 8336a320..d65f352f 100644
--- a/capsul/config/configuration.py
+++ b/capsul/config/configuration.py
@@ -195,8 +195,7 @@ def add_module(self, module_name, allow_existing=False):
             self.config_modules.append(module_name)
         if hasattr(cls, "module_dependencies"):
-            module_dependencies = getattr(cls, "module_dependencies")
-            for dependency in module_dependencies:
+            for dependency in cls.module_dependencies:
                 self.add_module(dependency, allow_existing=True)
 
     def remove_module(self, module_name):
@@ -506,9 +505,9 @@ def merge_configs(self):
 def available_modules():
     module = sys.modules.get(__name__)
     mod_base = module.__name__.rsplit(".", 1)[0]
-    mod_path = getattr(module, "__file__")
+    mod_path = module.__file__
     if mod_path is None:
-        mod_path = getattr(module, "__path__")
+        mod_path = module.__path__
     mod_dir = os.path.dirname(mod_path)
     modules = []
     for p in os.listdir(mod_dir):
diff --git a/capsul/engine/__init__.py b/capsul/engine/__init__.py
index 444583b8..46add13e 100644
--- a/capsul/engine/__init__.py
+++ b/capsul/engine/__init__.py
@@ -170,7 +170,7 @@ def engine_status(self):
         if engine_id:
             result["workers_count"] = database.workers_count(engine_id)
             result["connections"] = database.engine_connections(engine_id)
-            result["persistent"]: database.persistent(engine_id)
+            result["persistent"] = database.persistent(engine_id)
         return result
 
     def start_workers(self):
diff --git a/capsul/execution_context.py b/capsul/execution_context.py
index 2206ffd9..fe34824a 100644
--- a/capsul/execution_context.py
+++ b/capsul/execution_context.py
@@ -772,15 +772,15 @@ def find_temporary_to_generate(executable):
         field.generate_temporary = False
         if isinstance(node, ProcessIteration):
             node.process.field(field.name).generate_temporary = field.generate_temporary
-        for node, parameter in executable.get_linked_items(
+        for snode, parameter in executable.get_linked_items(
             node, field.name, direction="links_from",
             in_outer_pipelines=True
         ):
-            if isinstance(node, ProcessIteration):
-                stack.append((node.process, node.process.field(parameter)))
-                # print('!temporaries! + ', node.process.full_name, ':', parameter)
+            if isinstance(snode, ProcessIteration):
+                stack.append((snode.process, snode.process.field(parameter)))
+                # print('!temporaries! + ', snode.process.full_name, ':', parameter)
             else:
-                stack.append((node, node.field(parameter)))
-                # print('!temporaries! + ', node.full_name, ':', parameter)
+                stack.append((snode, snode.field(parameter)))
+                # print('!temporaries! + ', snode.full_name, ':', parameter)
     # print('!temporaries! parameters with temporary')
     # for n, p in temporaries:
diff --git a/capsul/pipeline/custom_nodes/cv_node.py b/capsul/pipeline/custom_nodes/cv_node.py
index b0562805..9af9a17a 100644
--- a/capsul/pipeline/custom_nodes/cv_node.py
+++ b/capsul/pipeline/custom_nodes/cv_node.py
@@ -82,8 +82,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import CrossValidationFoldJob
@@ -97,6 +97,8 @@ def build_job(
         param_dict["test"] = self.test
         param_dict["nfolds"] = self.nfolds
         param_dict["fold"] = self.fold
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = CrossValidationFoldJob(
             name=name,
             referenced_input_files=referenced_input_files,
diff --git a/capsul/pipeline/custom_nodes/loo_node.py b/capsul/pipeline/custom_nodes/loo_node.py
index a4d22b07..f1534cd8 100644
--- a/capsul/pipeline/custom_nodes/loo_node.py
+++ b/capsul/pipeline/custom_nodes/loo_node.py
@@ -118,8 +118,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import LeaveOneOutJob
@@ -134,6 +134,8 @@ def build_job(
                 pass
         param_dict = dict(param_dict)
         param_dict["index"] = index
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = LeaveOneOutJob(
             name=name,
             referenced_input_files=referenced_input_files,
diff --git a/capsul/pipeline/custom_nodes/map_node.py b/capsul/pipeline/custom_nodes/map_node.py
index 2517772d..1db56b5c 100644
--- a/capsul/pipeline/custom_nodes/map_node.py
+++ b/capsul/pipeline/custom_nodes/map_node.py
@@ -38,10 +38,12 @@ def __init__(
         self,
         pipeline,
         name,
-        input_names=["inputs"],
-        output_names=["output_%d"],
+        input_names=None,
+        output_names=None,
         input_types=None,
     ):
+        input_names = input_names or ["inputs"]
+        output_names = output_names or ["output_%d"]
         in_fields = []
         out_fields = [{"name": "lengths", "optional": True}]
@@ -153,8 +155,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import MapJob
@@ -170,6 +172,8 @@ def build_job(
             for i in range(len(value)):
                 opname = output_name % i
                 param_dict[opname] = getattr(self, opname)
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = MapJob(
             name=name,
             referenced_input_files=referenced_input_files,
diff --git a/capsul/pipeline/custom_nodes/reduce_node.py b/capsul/pipeline/custom_nodes/reduce_node.py
index 7ddfdbde..f403c915 100644
--- a/capsul/pipeline/custom_nodes/reduce_node.py
+++ b/capsul/pipeline/custom_nodes/reduce_node.py
@@ -34,10 +34,12 @@ def __init__(
         self,
         pipeline,
         name,
-        input_names=["input_%d"],
-        output_names=["outputs"],
+        input_names=None,
+        output_names=None,
         input_types=None,
     ):
+        input_names = input_names or ["input_%d"]
+        output_names = output_names or ["outputs"]
         in_fields = [
             {"name": "lengths", "optional": True},
             {"name": "skip_empty", "optional": True},
@@ -209,8 +211,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import MapJob
@@ -227,6 +229,8 @@ def build_job(
             value = getattr(self, output_name, undefined)
             if value not in (None, undefined):
                 param_dict[output_name] = value
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = MapJob(
             name=name,
             referenced_input_files=referenced_input_files,
diff --git a/capsul/pipeline/custom_nodes/strcat_node.py b/capsul/pipeline/custom_nodes/strcat_node.py
index 34149b57..807b3f4d 100644
--- a/capsul/pipeline/custom_nodes/strcat_node.py
+++ b/capsul/pipeline/custom_nodes/strcat_node.py
@@ -24,7 +24,7 @@ def __init__(
         concat_plug,
         outputs,
         make_optional=(),
-        param_types={},
+        param_types=None,
     ):
         """
         Parameters
@@ -46,6 +46,7 @@ def __init__(
             parameters types dict: {param_name: field_type_as_string}
         """
+        param_types = param_types or {}
         node_inputs = [
             dict(name=i, optional=(i in make_optional))
             for i in params
@@ -69,7 +70,8 @@ def __init__(
         self.cat_callback()
         self.set_callbacks()
 
-    def add_parameters(self, param_types={}):
+    def add_parameters(self, param_types=None):
+        param_types = param_types or {}
         added_fields = [self._concat_plug]
         for name in self._concat_sequence + added_fields:
             plug = self.plugs[name]
@@ -133,8 +135,8 @@ def params_to_command(self):
     def build_job(
         self,
         name=None,
-        referenced_input_files=[],
-        referenced_output_files=[],
+        referenced_input_files=None,
+        referenced_output_files=None,
         param_dict=None,
     ):
         from soma_workflow.custom_jobs import StrCatJob
@@ -147,6 +149,8 @@ def build_job(
         # [re] build the concatenated output
         self.cat_callback()
         param_dict[self._concat_plug] = getattr(self, self._concat_plug)
+        referenced_input_files = referenced_input_files or []
+        referenced_output_files = referenced_output_files or []
         job = StrCatJob(
             name=name,
             referenced_input_files=referenced_input_files,
diff --git a/capsul/pipeline/pipeline.py b/capsul/pipeline/pipeline.py
index 117b676a..d2fd5190 100644
--- a/capsul/pipeline/pipeline.py
+++ b/capsul/pipeline/pipeline.py
@@ -2460,10 +2460,10 @@ def __setitem__(self, path, value):
     def dispatch_value(self, node, name, value):
         """Propagate the value from a pipeline plug through links"""
         # print(f"!dispatch! {node.name}.{name} = {value}")
-        for node, plug in self.dispatch_plugs(node, name):
-            # print(f"!dispatch! -> {node.name}.{plug}")
-            if getattr(node, plug, undefined) != value:
-                setattr(node, plug, value)
+        for snode, plug in self.dispatch_plugs(node, name):
+            # print(f"!dispatch! -> {snode.name}.{plug}")
-> {snode.name}.{plug}") + if getattr(snode, plug, undefined) != value: + setattr(snode, plug, value) def dispatch_plugs(self, node, name): """generator through linked plugs""" @@ -2669,7 +2669,8 @@ def node_from_path(self, path): class CustomPipeline(Pipeline): - def __init__(self, definition="custom_pipeline", json_executable={}): + def __init__(self, definition="custom_pipeline", json_executable=None): + json_executable = json_executable or {} object.__setattr__(self, "json_executable", json_executable) super().__init__( definition=definition, diff --git a/capsul/pipeline/pipeline_tools.py b/capsul/pipeline/pipeline_tools.py index f6ebeae8..af248c71 100644 --- a/capsul/pipeline/pipeline_tools.py +++ b/capsul/pipeline/pipeline_tools.py @@ -267,7 +267,7 @@ def pipeline_link_color(plug, link): def dot_graph_from_pipeline( - pipeline, nodes_sizes={}, use_nodes_pos=False, include_io=True, enlarge_boxes=0.0 + pipeline, nodes_sizes=None, use_nodes_pos=False, include_io=True, enlarge_boxes=0.0 ): """ Build a graphviz/dot-compatible representation of the pipeline. @@ -319,6 +319,7 @@ def _link_color(plug, link): active = plug.activated and link[3].activated return (0, 0, 0), style, active, link[4] + nodes_sizes = nodes_sizes or {} nodes = [] edges = {} has_outputs = False @@ -408,7 +409,7 @@ def _link_color(plug, link): def dot_graph_from_workflow( - pipeline, nodes_sizes={}, use_nodes_pos=False, enlarge_boxes=0.0 + pipeline, nodes_sizes=None, use_nodes_pos=False, enlarge_boxes=0.0 ): """ Build a graphviz/dot-compatible representation of the pipeline workflow. @@ -444,6 +445,7 @@ def dot_graph_from_workflow( :py:func:`save_dot_graph` """ + nodes_sizes = nodes_sizes or {} graph = pipeline.workflow_graph() nodes = [] edges = {} @@ -536,7 +538,7 @@ def _str_repr(item): def save_dot_image( pipeline, filename, - nodes_sizes={}, + nodes_sizes=None, use_nodes_pos=False, include_io=True, enlarge_boxes=0.0, @@ -585,6 +587,7 @@ def save_dot_image( **kwargs: additional attributes for the dot graph like nodesep=0.1 or rankdir="TB" """ + nodes_sizes = nodes_sizes or {} if workflow: dgraph = dot_graph_from_workflow( pipeline, @@ -1382,7 +1385,7 @@ def __init__(self, value): self.value = value def __repr__(self): - return "lambda: %s" % repr(value) + return "lambda: %s" % repr(self.value) meta["default_factory"] = def_fac(field.default_factory()) has_default = True diff --git a/capsul/pipeline/python_export.py b/capsul/pipeline/python_export.py index b1f2220d..f35dac55 100644 --- a/capsul/pipeline/python_export.py +++ b/capsul/pipeline/python_export.py @@ -139,7 +139,7 @@ def _write_process(process, pyf, name, enabled, skip_invalid): if isinstance(snode, Pipeline): sself_str = '%s.nodes["%s"]' % (self_str, "%s") - for node_name, snode in snode.nodes.items(): + for node_name, _ in snode.nodes.items(): scnode = cnode.nodes[node_name] if node_name == "": diff --git a/capsul/pipeline/test/test_activation.py b/capsul/pipeline/test/test_activation.py index c89eead2..dca53cc7 100644 --- a/capsul/pipeline/test/test_activation.py +++ b/capsul/pipeline/test/test_activation.py @@ -131,7 +131,7 @@ def run_unactivation_tests_2(self): if not app: app = QtGui.QApplication(sys.argv) pipeline = executable(MyPipeline) - setattr(pipeline.nodes_activation, "way11", False) + pipeline.nodes_activation.way11 = False view1 = PipelineDeveloperView(pipeline) view1.show() app.exec_() diff --git a/capsul/pipeline/test/test_complex_pipeline_activations.py b/capsul/pipeline/test/test_complex_pipeline_activations.py index c0d63471..09cc31f4 
--- a/capsul/pipeline/test/test_complex_pipeline_activations.py
+++ b/capsul/pipeline/test/test_complex_pipeline_activations.py
@@ -857,10 +857,10 @@ def test_complex_activations(self):
                 node_name = split[-1]
                 try:
                     node = node_pipeline.nodes[node_name]
-                except KeyError:
+                except KeyError as e:
                     raise KeyError(
                         f"Pipeline {node_pipeline.pipeline} has no node named {node_name}"
-                    )
+                    ) from e
                 try:
                     what = "activation of node {0}".format(
                         full_node_name or "main pipeline node"
@@ -876,10 +876,10 @@ def test_complex_activations(self):
                     if expected is not None:
                         got = node.enabled
                     self.assertEqual(expected, got)
-                except AssertionError:
+                except AssertionError as e:
                     raise AssertionError(
                         f"Wrong activation within ComplexPipeline with parameters {kwargs}: {what} is supposed to be {expected} but is {got}"
-                    )
+                    ) from e
 
 
 if __name__ == "__main__":
diff --git a/capsul/pipeline/test/test_pipeline.py b/capsul/pipeline/test/test_pipeline.py
index 866ea798..559d24ec 100644
--- a/capsul/pipeline/test/test_pipeline.py
+++ b/capsul/pipeline/test/test_pipeline.py
@@ -123,7 +123,7 @@ def test_enabled(self):
         self.assertEqual(workflow_repr, "")
 
     def test_run_pipeline(self):
-        setattr(self.pipeline.nodes_activation, "node2", True)
+        self.pipeline.nodes_activation.node2 = True
         tmp = tempfile.mkstemp("", prefix="capsul_test_pipeline")
         self.temp_files.append(tmp[1])
         ofile = tmp[1]
@@ -220,7 +220,7 @@ def test_pipeline_json(self):
     app = Qt.QApplication(sys.argv)
     pipeline = executable(MyPipeline)
-    setattr(pipeline.nodes_activation, "node2", True)
+    pipeline.nodes_activation.node2 = True
     view1 = PipelineDeveloperView(pipeline, allow_open_controller=True)
     view1.show()
     app.exec_()
diff --git a/capsul/pipeline/test/test_pipeline_parameters.py b/capsul/pipeline/test/test_pipeline_parameters.py
index bab34884..49fbcd92 100644
--- a/capsul/pipeline/test/test_pipeline_parameters.py
+++ b/capsul/pipeline/test/test_pipeline_parameters.py
@@ -738,7 +738,7 @@ def create_pipeline():
         with self.capsul.engine() as ce:
             ce.run(pipeline1, timeout=5)
 
-        pipeline1.out
+        assert pipeline1.out
 
         save_pipeline_parameters(self.path, pipeline1)
diff --git a/capsul/process/node.py b/capsul/process/node.py
index 56e0029a..e8fd991e 100644
--- a/capsul/process/node.py
+++ b/capsul/process/node.py
@@ -146,7 +146,7 @@ class Node(Controller):
     )
 
     def __init__(
-        self, definition=None, pipeline=None, name=None, inputs={}, outputs={}
+        self, definition=None, pipeline=None, name=None, inputs=None, outputs=None
     ):
         """Generate a Node
 
@@ -174,7 +174,7 @@ def __init__(
             the pipeline object where the node is added
         name: str
             the node name
-        inputs: list of dict
+        inputs: dict
             a list of input parameters containing a dictionary with default
             values (mandatory key: name)
         outputs: dict
@@ -224,6 +224,7 @@ def __init__(
         # generate a list with all the inputs and outputs
         # the second parameter (parameter_type) is False for an input,
         # True for an output
+        inputs = inputs or {}
         parameters = list(
             zip(
                 inputs,
@@ -233,6 +234,7 @@
                 * len(inputs),
             )
         )
+        outputs = outputs or {}
         parameters.extend(
             list(
                 zip(
diff --git a/capsul/process/process.py b/capsul/process/process.py
index c17fe04b..d46b1dbb 100644
--- a/capsul/process/process.py
+++ b/capsul/process/process.py
@@ -663,7 +663,8 @@ def _move_outputs(self):
         del self._former_output_directory
         return output_values
 
-    def _move_files(self, src_directory, dst_directory, value, moved_dict={}):
+    def _move_files(self, src_directory, dst_directory, value, moved_dict=None):
+        moved_dict = moved_dict or {}
         if isinstance(value, (list, tuple)):
             new_value = [
                 self._move_files(src_directory, dst_directory, item, moved_dict)
@@ -1109,7 +1110,7 @@ class Smooth(NipypeProcess):
         self.synchronize = sc.Event()
 
         # use the nipype doc for help
-        doc = getattr(nipype_instance, "__doc__")
+        doc = nipype_instance.__doc__
         if doc:
             self.__doc__ = doc
diff --git a/capsul/qt_gui/widgets/pipeline_developer_view.py b/capsul/qt_gui/widgets/pipeline_developer_view.py
index 5168b6e7..2dd442de 100644
--- a/capsul/qt_gui/widgets/pipeline_developer_view.py
+++ b/capsul/qt_gui/widgets/pipeline_developer_view.py
@@ -253,7 +253,7 @@ def __init__(
         sub_pipeline=None,
         colored_parameters=True,
         logical_view=False,
-        labels=[],
+        labels=None,
         show_opt_inputs=True,
         show_opt_outputs=True,
         userlevel=0,
@@ -283,7 +283,7 @@ def __init__(
             if getattr(field, "hidden", False):
                 show = False
             elif getattr(field, "userlevel", None) is not None:
-                if getattr(field, "userlevel") > self.userlevel:
+                if field.userlevel > self.userlevel:
                     show = False
             if show:
                 self.parameters[pname] = param
@@ -305,7 +305,7 @@ def __init__(
         self.show_opt_outputs = show_opt_outputs
 
         self.labels = []
-        self.scene_labels = labels
+        self.scene_labels = labels or []
         self.label_items = []
         my_labels = []
         steps = getattr(pipeline, "pipeline_steps", None)
@@ -392,7 +392,7 @@ def update_parameters_now(self):
             if getattr(param, "hidden", False):
                 show = False
             elif getattr(param, "userlevel", None) is not None:
-                if getattr(param, "userlevel") > self.userlevel:
+                if param.userlevel > self.userlevel:
                     show = False
             if show:
                 self.parameters[pname] = self.process.plugs[pname]
@@ -1848,7 +1848,8 @@ def _remove_link(self, source_dest):
             self.removeItem(glink)
             del self.glinks[new_source_dest]
 
-    def update_paths(self, regions=[]):
+    def update_paths(self, regions=None):
+        regions = regions or []
         for name, i in self.gnodes.items():
             self.pos[i.name] = i.pos()
             br = i.box.boundingRect()
@@ -3950,7 +3950,7 @@ def reset_initial_nodes_positions(self):
         #     gnode.update(0,0,*dimension)
         # #####################################################
 
-        pos = getattr(scene.pipeline, "node_position")
+        pos = scene.pipeline.node_position
         if pos is not None:
             scene.pos = pos
             for node, position in pos.items():
@@ -4820,7 +4820,7 @@ def show_node_doc(self, node):
                 doc_path = "file://%s" % os.path.abspath(doc_path)
             doc_browser.setUrl(Qt.QUrl(doc_path))
         else:
-            gethelp = getattr(node, "get_help")
+            gethelp = node.get_help
             msg = None
             if gethelp:
                 msg = node.get_help(returnhelp=True)
diff --git a/capsul/test/test_capsul_notebooks.py b/capsul/test/test_capsul_notebooks.py
index d61e0678..21e98976 100644
--- a/capsul/test/test_capsul_notebooks.py
+++ b/capsul/test/test_capsul_notebooks.py
@@ -8,10 +8,10 @@
 try:
     import nbformat
     from jupyter_core.command import main as main_jupyter
-except ImportError:
+except ImportError as e:
     raise ImportError(
         "cannot import nbformat and/or jupyter_core.command: "
         "cannot test notebooks"
-    )
+    ) from e
 
 from soma.test_utils import test_notebook as tnb
diff --git a/doc/source/conf.py b/doc/source/conf.py
index df659dc6..89349098 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -19,8 +19,8 @@
 # Doc generation depends on being able to import capsul
 try:
     import capsul
-except ImportError:
-    raise RuntimeError("Cannot import CAPSUL, please investigate")
+except ImportError as e:
+    raise RuntimeError("Cannot import CAPSUL, please investigate") from e
 
 from distutils.version import LooseVersion
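
Reviewer note: the recurring fix in this patch replaces mutable default arguments
(`referenced_input_files=[]`, `param_types={}`, `nodes_sizes={}`, ...) with a `None`
default plus an `x = x or <fresh container>` guard, alongside `raise ... from e`
exception chaining. Below is a minimal sketch of the mutable-default pitfall being
fixed; the function and file names are hypothetical, not capsul code:

    # Default values are evaluated once, at function definition time,
    # so a mutable default is shared by every call that omits the argument.
    def build_job_buggy(files=[]):
        files.append("output.nii")
        return files

    print(build_job_buggy())  # ['output.nii']
    print(build_job_buggy())  # ['output.nii', 'output.nii']  <- leaked state

    # The pattern used throughout this patch: default to None and create
    # a fresh container inside the function body on each call.
    def build_job_fixed(files=None):
        files = files or []
        files.append("output.nii")
        return files

    print(build_job_fixed())  # ['output.nii']
    print(build_job_fixed())  # ['output.nii']

One caveat of the `or` idiom: an explicitly passed empty list or dict is falsy, so it
too is replaced by a fresh container; `if files is None: files = []` would preserve the
caller's object. That distinction does not matter for the call sites touched here, but
it is worth keeping in mind for future changes.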