diff --git a/.flake8 b/.flake8
index ef0865e8..68ddec59 100644
--- a/.flake8
+++ b/.flake8
@@ -8,7 +8,14 @@ exclude =
     venv,
 per-file-ignores =
     # - docstrings rules that should not be applied to tests
-    tests/*: D100, D103, D104
+    **/test_*: D100, D103, D104
+    __init__.py:F401
     # allow "weird indentation"
     tests/test_workflow_*.py: D100, D103, D104, E131, E127, E501
-max-line-length = 90
+docstring-convention = numpy
+max_complexity = 12
+max_function_length = 150
+# for compatibility with black
+# https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#flake8
+max-line-length = 88
+extend-ignore = D105, E203, E704
diff --git a/.github/workflows/run_tests_new.yml b/.github/workflows/run_tests_new.yml
new file mode 100644
index 00000000..77e28e91
--- /dev/null
+++ b/.github/workflows/run_tests_new.yml
@@ -0,0 +1,49 @@
+---
+name: Run tests
+
+on:
+  push:
+    branches: ['*']
+  pull_request:
+    branches: ['*']
+
+# cancel previous runs if new one is triggered
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+
+  test:
+
+    runs-on: ubuntu-latest
+
+    # only trigger on upstream repo
+    if: github.repository_owner == 'neurodatascience' && github.event.repository.name == 'nipoppy'
+
+    steps:
+
+    - uses: actions/checkout@v3
+      with:
+        submodules: recursive
+
+    - uses: actions/setup-python@v4
+      with:
+        python-version: '3.11'
+
+    - name: Install package
+      run: |
+        cd nipoppy_cli
+        pip install -U pip
+        pip install .[tests]
+
+    - name: Run tests
+      run: |
+        python -m pytest nipoppy_cli --cov=nipoppy_cli/nipoppy --cov-report=xml
+
+    - name: Upload coverage to Codecov
+      uses: codecov/codecov-action@v4
+      with:
+        file: ./coverage.xml
+        name: codecov-umbrella
+        token: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.gitignore b/.gitignore
index e4ca476c..f877086d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,3 +30,6 @@
 htmlcov
 
 env/
+
+# VS Code
+.vscode/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 39a9029b..8629666f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,9 +3,8 @@
 # See https://pre-commit.com/hooks.html for more hooks
 
 repos:
-
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
+  rev: v4.5.0
   hooks:
   - id: check-ast
   - id: check-case-conflict
@@ -15,9 +14,23 @@
   - id: end-of-file-fixer
   - id: trailing-whitespace
   - id: check-toml
-
+- repo: https://github.com/pycqa/isort
+  rev: 5.13.2
+  hooks:
+  - id: isort
+- repo: https://github.com/psf/black-pre-commit-mirror
+  rev: 23.12.1
+  hooks:
+  - id: black
+    args: [--config=pyproject.toml]
+- repo: https://github.com/pyCQA/flake8
+  rev: 7.0.0
+  hooks:
+  - id: flake8
+    args: [--verbose, --config, .flake8]
+    additional_dependencies: [flake8-docstrings]
 - repo: https://github.com/codespell-project/codespell
-  rev: v2.2.5
+  rev: v2.2.6
   hooks:
   - id: codespell
     args: [--toml=pyproject.toml]
diff --git a/nipoppy_cli/LICENSE b/nipoppy_cli/LICENSE
new file mode 100644
index 00000000..5aed51ac
--- /dev/null
+++ b/nipoppy_cli/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 NeuroDataScience
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/nipoppy_cli/README.md b/nipoppy_cli/README.md
new file mode 100644
index 00000000..8c731ae2
--- /dev/null
+++ b/nipoppy_cli/README.md
@@ -0,0 +1,26 @@
+![License](https://img.shields.io/badge/license-MIT-blue.svg)
+[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8084759.svg)](https://doi.org/10.5281/zenodo.8084759)
+![https://github.com/psf/black](https://img.shields.io/badge/code%20style-black-000000.svg)
+
+# Nipoppy
+
+A framework for standardized organization and processing of neuroimaging-clinical datasets.
+
+## Developer setup
+
+Clone this repo:
+
+```bash
+git clone https://github.com/neurodatascience/nipoppy.git
+```
+
+Install `nipoppy` in editable mode with `dev` dependencies:
+```bash
+cd nipoppy
+pip install -e .[dev]
+```
+
+Set up `pre-commit`:
+```bash
+pre-commit install
+```
diff --git a/nipoppy_cli/nipoppy/__init__.py b/nipoppy_cli/nipoppy/__init__.py
new file mode 100644
index 00000000..d08acd55
--- /dev/null
+++ b/nipoppy_cli/nipoppy/__init__.py
@@ -0,0 +1 @@
+"""Nipoppy."""
diff --git a/nipoppy_cli/nipoppy/base.py b/nipoppy_cli/nipoppy/base.py
new file mode 100644
index 00000000..777aafd8
--- /dev/null
+++ b/nipoppy_cli/nipoppy/base.py
@@ -0,0 +1,71 @@
+"""Base class."""
+
+import inspect
+from abc import ABC
+from typing import Optional, Sequence
+
+
+class Base(ABC):
+    """Base class with utilities for pretty string representations."""
+
+    def _str_helper(
+        self,
+        components: Optional[Sequence] = None,
+        names: Optional[Sequence[str]] = None,
+        sep=", ",
+    ) -> str:
+        """Generate a custom string representation of an object.
+
+        The output string is of the form: ClassName(component1[sep]component2[sep]...)
+
+        Parameters
+        ----------
+        components : Sequence, optional
+            Components to concatenate, by default None
+        names : Sequence[str], optional
+            Name of attributes to be added to the components as key-value pairs,
+            by default None
+        sep : str, optional
+            Separator between components, by default ", "
+
+        Returns
+        -------
+        str
+            String representation of the object.
+        """
+        if components is None:
+            components = []
+
+        if names is not None:
+            for name in names:
+                components.append(f"{name}={getattr(self, name)}")
+
+        return f"{type(self).__name__}({sep.join([str(c) for c in components])})"
+
+    def __str__(self) -> str:
+        """Return a string representation of the object based on its __init__ arguments.
+
+        Raises
+        ------
+        RuntimeError
+            If the parameter names obtained from the __init__ method do not match the
+            attributes of the object.
+        """
+        signature = inspect.signature(type(self))
+        names = [
+            name
+            for name, parameter in signature.parameters.items()
+            if parameter.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
+        ]
+        try:
+            return self._str_helper(names=names)
+        except AttributeError:
+            raise RuntimeError(
+                f"The __init__ method of the {type(self)} class has positional and/or"
+                " keyword arguments that are not set as attributes of the object"
+                ". Failed to build string representation: need to override the"
+                " __str__ method"
+            )
+
+    def __repr__(self) -> str:
+        return self.__str__()
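For context, `Base.__str__` introspects the `__init__` signature, so any subclass that stores its constructor arguments under attributes of the same name gets a readable representation for free. A minimal sketch (the `Example` class is hypothetical, not part of this diff):

```python
from nipoppy.base import Base


class Example(Base):
    """Hypothetical subclass whose __init__ arguments are stored as attributes."""

    def __init__(self, name: str, count: int = 3):
        self.name = name
        self.count = count


print(Example("abc"))  # Example(name=abc, count=3)
```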
diff --git a/nipoppy_cli/nipoppy/cli/__init__.py b/nipoppy_cli/nipoppy/cli/__init__.py
new file mode 100644
index 00000000..6aa21481
--- /dev/null
+++ b/nipoppy_cli/nipoppy/cli/__init__.py
@@ -0,0 +1 @@
+"""Parsers and entrypoint for the command-line interface."""
diff --git a/nipoppy_cli/nipoppy/cli/parser.py b/nipoppy_cli/nipoppy/cli/parser.py
new file mode 100644
index 00000000..2a8f4912
--- /dev/null
+++ b/nipoppy_cli/nipoppy/cli/parser.py
@@ -0,0 +1,316 @@
+"""Parsers for the CLI."""
+
+import logging
+from argparse import ArgumentParser, HelpFormatter, _ActionsContainer, _SubParsersAction
+from pathlib import Path
+
+from nipoppy.utils import (
+    BIDS_SESSION_PREFIX,
+    BIDS_SUBJECT_PREFIX,
+    check_participant,
+    check_session,
+)
+
+PROGRAM_NAME = "nipoppy"
+COMMAND_INIT = "init"
+COMMAND_DOUGHNUT = "doughnut"
+COMMAND_DICOM_REORG = "reorg"
+COMMAND_BIDS_CONVERSION = "bidsify"
+COMMAND_PIPELINE_RUN = "run"
+COMMAND_PIPELINE_TRACK = "track"
+
+DEFAULT_VERBOSITY = "2"  # info
+VERBOSITY_TO_LOG_LEVEL_MAP = {
+    "0": logging.ERROR,
+    "1": logging.WARNING,
+    "2": logging.INFO,
+    "3": logging.DEBUG,
+}
+
+
+def add_arg_dataset_root(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add a --dataset-root argument to the parser."""
+    parser.add_argument(
+        "--dataset-root",
+        type=Path,
+        required=True,
+        help="Path to the root of the dataset.",
+    )
+    return parser
+
+
+def add_arg_simulate(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add a --simulate argument to the parser."""
+    parser.add_argument(
+        "--simulate",
+        action="store_true",
+        help="Simulate the pipeline run without executing the generated command-line.",
+    )
+    return parser
+
+
+def add_args_participant_and_session(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add --participant and --session arguments to the parser."""
+    parser.add_argument(
+        "--participant",
+        type=check_participant,
+        required=False,
+        help=f"Participant ID (with or without the {BIDS_SUBJECT_PREFIX} prefix).",
+    )
+    parser.add_argument(
+        "--session",
+        type=check_session,
+        required=False,
+        help=f"Session ID (with or without the {BIDS_SESSION_PREFIX} prefix).",
+    )
+    return parser
+
+
+def add_args_pipeline(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add pipeline-related arguments to the parser."""
+    parser.add_argument(
+        "--pipeline",
+        type=str,
+        required=True,
+        help="Pipeline name.",
+    )
+    parser.add_argument(
+        "--pipeline-version",
+        type=str,
+        required=False,
+        help="Pipeline version.",
+    )
+    return parser
+
+
+def add_arg_layout(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add a --layout argument to the parser."""
+    parser.add_argument(
+        "--layout",
+        dest="fpath_layout",
+        type=Path,
+        required=False,
+        help=(
+            "Path to a custom layout specification file"
+            ", to be used instead of the default layout."
+        ),  # TODO point to example
+    )
+
+
+def add_arg_dry_run(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add a --dry-run argument to the parser."""
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Print commands but do not execute them.",
+    )
+    return parser
+
+
+def add_arg_help(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add a --help argument to the parser."""
+    parser.add_argument(
+        "-h",
+        "--help",
+        action="help",
+        help="Show this help message and exit.",
+    )
+    return parser
+
+
+def add_arg_verbosity(parser: _ActionsContainer) -> _ActionsContainer:
+    """Add a --verbosity argument to the parser."""
+
+    def _verbosity_to_log_level(verbosity: str):
+        try:
+            return VERBOSITY_TO_LOG_LEVEL_MAP[verbosity]
+        except KeyError:
+            parser.error(
+                f"Invalid verbosity level: {verbosity}."
+                f" Valid levels are {list(VERBOSITY_TO_LOG_LEVEL_MAP.keys())}."
+            )
+
+    parser.add_argument(
+        "--verbosity",
+        type=_verbosity_to_log_level,
+        default=DEFAULT_VERBOSITY,
+        help=(
+            "Verbosity level, from 0 (least verbose) to 3 (most verbose)."
+            f" Default: {DEFAULT_VERBOSITY}."
+        ),
+    )
+    return parser
+
+
+def add_subparser_init(
+    subparsers: _SubParsersAction,
+    formatter_class: type[HelpFormatter] = HelpFormatter,
+) -> ArgumentParser:
+    """Add subparser for init command."""
+    description = "Initialize a new dataset."
+    parser = subparsers.add_parser(
+        COMMAND_INIT,
+        description=description,
+        help=description,
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    parser = add_arg_dataset_root(parser)
+    return parser
+
+
+def add_subparser_doughnut(
+    subparsers: _SubParsersAction,
+    formatter_class: type[HelpFormatter] = HelpFormatter,
+) -> ArgumentParser:
+    """Add subparser for doughnut command."""
+    description = "Create/update a dataset's doughnut file."
+    parser = subparsers.add_parser(
+        COMMAND_DOUGHNUT,
+        description=description,
+        help=description,
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    parser = add_arg_dataset_root(parser)
+    parser.add_argument(
+        "--empty",
+        action="store_true",
+        help=(
+            "Set all statuses to False in newly added records"
+            " (regardless of what is on disk)."
+        ),
+    )
+    parser.add_argument(
+        "--regenerate",
+        action="store_true",
+        help=(
+            "Regenerate the doughnut file even if it already exists"
+            " (default: only append rows for new records)"
+        ),
+    )
+    return parser
+
+
+def add_subparser_dicom_reorg(
+    subparsers: _SubParsersAction,
+    formatter_class: type[HelpFormatter] = HelpFormatter,
+) -> ArgumentParser:
+    """Add subparser for reorg command."""
+    description = "(Re)organize raw DICOM files."  # TODO give paths in layout model
+    parser = subparsers.add_parser(
+        COMMAND_DICOM_REORG,
+        description=description,
+        help=description,
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    parser = add_arg_dataset_root(parser)
+    parser.add_argument(
+        "--copy-files",
+        action="store_true",
+        help=("Copy files when reorganizing (default: create symlinks)."),
+    )
+    return parser
+
+
+def add_subparser_bids_conversion(
+    subparsers: _SubParsersAction, formatter_class: type[HelpFormatter] = HelpFormatter
+) -> ArgumentParser:
+    """Add subparser for bidsify command."""
+    description = "Convert to BIDS."
+    parser = subparsers.add_parser(
+        COMMAND_BIDS_CONVERSION,
+        description=description,
+        help=description,
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    parser = add_arg_dataset_root(parser)
+    parser = add_args_pipeline(parser)
+    parser.add_argument(
+        "--pipeline-step",
+        type=str,
+        required=False,
+        help="Pipeline step.",
+    )
+    parser = add_args_participant_and_session(parser)
+    parser = add_arg_simulate(parser)
+    return parser
+
+
+def add_subparser_pipeline_run(
+    subparsers: _SubParsersAction, formatter_class: type[HelpFormatter] = HelpFormatter
+) -> ArgumentParser:
+    """Add subparser for run command."""
+    description = "Run a pipeline."
+    parser = subparsers.add_parser(
+        COMMAND_PIPELINE_RUN,
+        description=description,
+        help=description,
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    parser = add_arg_dataset_root(parser)
+    parser = add_args_pipeline(parser)
+    parser = add_args_participant_and_session(parser)
+    parser = add_arg_simulate(parser)
+    return parser
+
+
+def add_subparser_pipeline_track(
+    subparsers: _SubParsersAction, formatter_class: type[HelpFormatter] = HelpFormatter
+) -> ArgumentParser:
+    """Add subparser for track command."""
+    description = "Track the processing status of a pipeline."
+    parser = subparsers.add_parser(
+        COMMAND_PIPELINE_TRACK,
+        description=description,
+        help=description,
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    parser = add_arg_dataset_root(parser)
+    parser = add_args_pipeline(parser)
+    parser = add_args_participant_and_session(parser)
+    return parser
+
+
+def get_global_parser(
+    formatter_class: type[HelpFormatter] = HelpFormatter,
+) -> ArgumentParser:
+    """Get the global parser."""
+    global_parser = ArgumentParser(
+        prog=PROGRAM_NAME,
+        description="Organize and process neuroimaging-clinical datasets.",
+        epilog=(
+            f"Run '{PROGRAM_NAME} COMMAND --help'"
+            " for more information on a subcommand."
+        ),
+        formatter_class=formatter_class,
+        add_help=False,
+    )
+    add_arg_help(global_parser)
+
+    # subcommand parsers
+    subparsers = global_parser.add_subparsers(
+        title="Subcommands",
+        dest="command",
+        required=True,
+    )
+    add_subparser_init(subparsers, formatter_class=formatter_class)
+    add_subparser_doughnut(subparsers, formatter_class=formatter_class)
+    add_subparser_dicom_reorg(subparsers, formatter_class=formatter_class)
+    add_subparser_bids_conversion(subparsers, formatter_class=formatter_class)
+    add_subparser_pipeline_run(subparsers, formatter_class=formatter_class)
+    add_subparser_pipeline_track(subparsers, formatter_class=formatter_class)
+
+    # add common/global options to subcommand parsers
+    for parser in list(subparsers.choices.values()):
+        common_arg_group = parser.add_argument_group("Global options")
+        add_arg_layout(common_arg_group)
+        add_arg_verbosity(common_arg_group)
+        add_arg_dry_run(common_arg_group)
+        add_arg_help(common_arg_group)
+
+    return global_parser
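As a quick sanity check of the wiring above, the global parser can be exercised directly. A sketch (the dataset path is a placeholder):

```python
from nipoppy.cli.parser import get_global_parser

parser = get_global_parser()
args = parser.parse_args(
    ["run", "--dataset-root", "/data/my_dataset", "--pipeline", "fmriprep"]
)
print(args.command)       # "run"
print(args.dataset_root)  # PosixPath("/data/my_dataset")
print(args.verbosity)     # logging.INFO: argparse applies the type callback
                          # to the string default "2"
```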
diff --git a/nipoppy_cli/nipoppy/cli/run.py b/nipoppy_cli/nipoppy/cli/run.py
new file mode 100644
index 00000000..f24b1946
--- /dev/null
+++ b/nipoppy_cli/nipoppy/cli/run.py
@@ -0,0 +1,105 @@
+"""Command-line interface."""
+
+import sys
+from typing import Sequence
+
+from rich_argparse import RichHelpFormatter
+
+from nipoppy.cli.parser import (
+    COMMAND_BIDS_CONVERSION,
+    COMMAND_DICOM_REORG,
+    COMMAND_DOUGHNUT,
+    COMMAND_INIT,
+    COMMAND_PIPELINE_RUN,
+    COMMAND_PIPELINE_TRACK,
+    get_global_parser,
+)
+from nipoppy.logger import add_logfile, get_logger
+from nipoppy.workflows.bids_conversion import BidsConversionRunner
+from nipoppy.workflows.dataset_init import InitWorkflow
+from nipoppy.workflows.dicom_reorg import DicomReorgWorkflow
+from nipoppy.workflows.doughnut import DoughnutWorkflow
+from nipoppy.workflows.runner import PipelineRunner
+from nipoppy.workflows.tracker import PipelineTracker
+
+
+def cli(argv: Sequence[str] = None) -> None:
+    """Entrypoint to the command-line interface."""
+    if argv is None:
+        argv = sys.argv
+    parser = get_global_parser(formatter_class=RichHelpFormatter)
+    args = parser.parse_args(argv[1:])
+
+    # common arguments
+    command = args.command
+    fpath_layout = args.fpath_layout
+    logger = get_logger(name=command, level=args.verbosity)
+    dry_run = args.dry_run
+
+    # to pass to all workflows
+    workflow_kwargs = dict(fpath_layout=fpath_layout, logger=logger, dry_run=dry_run)
+
+    try:
+        dpath_root = args.dataset_root
+
+        if command == COMMAND_INIT:
+            workflow = InitWorkflow(
+                dpath_root=dpath_root,
+                **workflow_kwargs,
+            )
+        elif command == COMMAND_DOUGHNUT:
+            workflow = DoughnutWorkflow(
+                dpath_root=dpath_root,
+                empty=args.empty,
+                regenerate=args.regenerate,
+                **workflow_kwargs,
+            )
+        elif command == COMMAND_DICOM_REORG:
+            workflow = DicomReorgWorkflow(
+                dpath_root=dpath_root,
+                copy_files=args.copy_files,
+                **workflow_kwargs,
+            )
+        elif command == COMMAND_BIDS_CONVERSION:
+            workflow = BidsConversionRunner(
+                dpath_root=dpath_root,
+                pipeline_name=args.pipeline,
+                pipeline_version=args.pipeline_version,
+                pipeline_step=args.pipeline_step,
+                participant=args.participant,
+                session=args.session,
+                simulate=args.simulate,
+                **workflow_kwargs,
+            )
+        elif command == COMMAND_PIPELINE_RUN:
+            workflow = PipelineRunner(
+                dpath_root=dpath_root,
+                pipeline_name=args.pipeline,
+                pipeline_version=args.pipeline_version,
+                participant=args.participant,
+                session=args.session,
+                simulate=args.simulate,
+                **workflow_kwargs,
+            )
+        elif command == COMMAND_PIPELINE_TRACK:
+            workflow = PipelineTracker(
+                dpath_root=dpath_root,
+                pipeline_name=args.pipeline,
+                pipeline_version=args.pipeline_version,
+                participant=args.participant,
+                session=args.session,
+                **workflow_kwargs,
+            )
+        else:
+            raise ValueError(f"Unsupported command: {command}")
+
+        # cannot log to file in init since the dataset doesn't exist yet
+        if command != COMMAND_INIT:
+            add_logfile(logger, workflow.generate_fpath_log())
+
+        # run the workflow
+        workflow.run()
+
+    except Exception:
+        logger.exception("Error when creating/running a workflow")
+        sys.exit(1)
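Since `cli()` falls back to `sys.argv` only when no argument list is given, it can also be driven programmatically (useful in tests). A sketch with a placeholder path:

```python
from nipoppy.cli.run import cli

# equivalent to running `nipoppy init --dataset-root /data/my_dataset` in a shell;
# exits with status 1 if the workflow raises
cli(["nipoppy", "init", "--dataset-root", "/data/my_dataset"])
```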
diff --git a/nipoppy_cli/nipoppy/config/__init__.py b/nipoppy_cli/nipoppy/config/__init__.py
new file mode 100644
index 00000000..c3546700
--- /dev/null
+++ b/nipoppy_cli/nipoppy/config/__init__.py
@@ -0,0 +1,6 @@
+"""User/pipeline configurations."""
+
+from .boutiques import BoutiquesConfig
+from .container import ContainerConfig
+from .main import Config
+from .pipeline import PipelineConfig
diff --git a/nipoppy_cli/nipoppy/config/boutiques.py b/nipoppy_cli/nipoppy/config/boutiques.py
new file mode 100644
index 00000000..230422be
--- /dev/null
+++ b/nipoppy_cli/nipoppy/config/boutiques.py
@@ -0,0 +1,30 @@
+"""Boutiques configuration model and utility functions."""
+
+from pydantic import ConfigDict
+
+from nipoppy.config.container import ModelWithContainerConfig
+
+BOUTIQUES_CUSTOM_KEY = "custom"  # as defined by Boutiques schema
+BOUTIQUES_CONFIG_KEY = "nipoppy"
+
+
+class BoutiquesConfig(ModelWithContainerConfig):
+    """Model for custom configuration within a Boutiques descriptor."""
+
+    # dpath_participant_session_result (for tarring/zipping/extracting)
+    # run_on (for choosing which participants/sessions to run on)
+    # bids_input (for pybids)
+
+    model_config = ConfigDict(extra="forbid")
+
+
+def get_boutiques_config_from_descriptor(descriptor: dict) -> BoutiquesConfig:
+    """Return the Boutiques configuration object from a descriptor."""
+    try:
+        data = descriptor[BOUTIQUES_CUSTOM_KEY][BOUTIQUES_CONFIG_KEY]
+    except Exception:
+        raise RuntimeError(
+            "The Boutiques descriptor does not have a"
+            f" {BOUTIQUES_CUSTOM_KEY}/{BOUTIQUES_CONFIG_KEY} field: {descriptor}"
+        )
+    return BoutiquesConfig(**data)
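To illustrate the lookup above: the Nipoppy-specific settings are expected under the `custom`/`nipoppy` keys of a Boutiques descriptor. A minimal sketch with a hypothetical descriptor:

```python
from nipoppy.config.boutiques import get_boutiques_config_from_descriptor

descriptor = {
    "name": "some-pipeline",
    "custom": {"nipoppy": {"CONTAINER_CONFIG": {"ARGS": ["--cleanenv"]}}},
}
boutiques_config = get_boutiques_config_from_descriptor(descriptor)
print(boutiques_config.get_container_config().ARGS)  # ['--cleanenv']
```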
diff --git a/nipoppy_cli/nipoppy/config/container.py b/nipoppy_cli/nipoppy/config/container.py
new file mode 100644
index 00000000..63e80c96
--- /dev/null
+++ b/nipoppy_cli/nipoppy/config/container.py
@@ -0,0 +1,246 @@
+"""Container (i.e., Singularity/Apptainer) configuration model and utility functions."""
+
+import argparse
+import logging
+import os
+import shlex
+import shutil
+from pathlib import Path
+from typing import Any, Optional
+
+from pydantic import BaseModel, ConfigDict
+
+from nipoppy.logger import get_logger
+
+# Apptainer
+APPTAINER_BIND_FLAG = "--bind"
+APPTAINER_BIND_SEP = ":"
+APPTAINER_ENVVAR_PREFIXES = ["APPTAINERENV_", "SINGULARITYENV_"]
+
+
+class ContainerConfig(BaseModel):
+    """Model for container configuration."""
+
+    COMMAND: str = "apptainer"
+    SUBCOMMAND: str = "run"
+    ARGS: list[str] = []
+    ENV_VARS: dict[str, str] = {}
+    INHERIT: bool = True
+
+    model_config = ConfigDict(extra="forbid")
+
+    def add_bind_path(
+        self,
+        path_local: str | Path,
+        path_inside_container: Optional[str | Path] = None,
+        mode: str = "rw",
+    ):
+        """Add a bind path."""
+        self.ARGS = add_bind_path_to_args(
+            self.ARGS,
+            path_local=path_local,
+            path_inside_container=path_inside_container,
+            mode=mode,
+        )
+
+    def merge_args_and_env_vars(self, other: Any):
+        """
+        Merge arguments and environment variables with another instance.
+
+        Arguments from other are appended to arguments of the current instance,
+        but environment variables from other do not overwrite those of the current
+        instance.
+        """
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                f"Cannot merge {self.__class__} with object of type {type(other)}"
+            )
+
+        if self.ARGS != other.ARGS:
+            self.ARGS.extend(other.ARGS)
+
+        for env_var, value in other.ENV_VARS.items():
+            if env_var not in self.ENV_VARS:
+                self.ENV_VARS[env_var] = value
+
+        return self
+
+
+class ModelWithContainerConfig(BaseModel):
+    """To be inherited by configs that have a ContainerConfig sub-config."""
+
+    CONTAINER_CONFIG: ContainerConfig = ContainerConfig()
+
+    def get_container_config(self) -> ContainerConfig:
+        """Return the pipeline's ContainerConfig object."""
+        return self.CONTAINER_CONFIG
+
+
+def add_bind_path_to_args(
+    args: list[str],
+    path_local: str | Path,
+    path_inside_container: Optional[str | Path] = None,
+    mode: Optional[str] = "rw",
+):
+    """Add a bind path to the container arguments.
+
+    Parameters
+    ----------
+    args : list[str]
+        Existing arguments
+    path_local : str | Path
+        Path on disk. If this is a relative path or contains symlinks,
+        it will be resolved
+    path_inside_container : Optional[str | Path], optional
+        Path inside the container (if None, will be the same as the local path),
+        by default None
+    mode : str, optional
+        Read/write permissions.
+        Only used if path_inside_container is given, by default "rw"
+
+    Returns
+    -------
+    list[str]
+        The updated argument list
+    """
+    path_local = Path(path_local).resolve()
+
+    bind_spec_components = [str(path_local)]
+    if path_inside_container is not None:
+        bind_spec_components.append(str(path_inside_container))
+        if mode is not None:
+            bind_spec_components.append(mode)
+
+    args.extend(
+        [
+            APPTAINER_BIND_FLAG,
+            APPTAINER_BIND_SEP.join(bind_spec_components),
+        ]
+    )
+    return args
+
+
+def check_container_args(
+    args: list[str], logger: Optional[logging.Logger] = None
+) -> list[str]:
+    """Check/fix bind flags in args."""
+    if logger is None:
+        logger = get_logger("check_container_args")
+
+    # use argparse to parse all the bind flags
+    bind_spec_dest = "bind"
+    parser = argparse.ArgumentParser(exit_on_error=False)
+    parser.add_argument(
+        APPTAINER_BIND_FLAG, dest=bind_spec_dest, action="extend", nargs=1
+    )
+
+    replacement_map = {}
+    try:
+        # get all bind arguments
+        known_args, _ = parser.parse_known_args(args)
+        bind_specs = getattr(known_args, bind_spec_dest)
+        if bind_specs is not None:
+            for bind_spec in bind_specs:
+                # get the local path
+                bind_spec: str
+                bind_spec_components = bind_spec.split(APPTAINER_BIND_SEP)
+                path_local = Path(bind_spec_components[0])
+                path_local_original = path_local
+
+                logger.debug(f"Checking container bind spec: {bind_spec}")
+
+                # path must be absolute and exist
+                path_local = path_local.resolve()
+                if path_local != path_local_original:
+                    logger.warning(
+                        "Resolving path for container"
+                        f": {path_local_original} -> {path_local}"
+                    )
+                if not path_local.exists():
+                    path_local.mkdir(parents=True)
+                    logger.warning(
+                        "Creating missing directory for container bind path"
+                        f": {path_local}"
+                    )
+
+                # replace bind spec in args
+                if path_local != path_local_original:
+                    bind_spec_components[0] = str(path_local)
+                    replacement_map[bind_spec] = APPTAINER_BIND_SEP.join(
+                        bind_spec_components
+                    )
+
+    except Exception as exception:
+        raise RuntimeError(
+            f"Error parsing {APPTAINER_BIND_FLAG} flags in container"
+            f" arguments: {args}. Make sure each flag is followed by a valid spec"
+            f" (e.g. {APPTAINER_BIND_FLAG} /path/local:/path/container:rw)"
+            f". Exact error was: {type(exception).__name__} {exception}"
+        )
+
+    # apply replacements
+    args_str = shlex.join(args)
+    for to_replace, replacement in replacement_map.items():
+        args_str = args_str.replace(to_replace, replacement)
+
+    return shlex.split(args_str)
+
+
+def check_container_command(command: str) -> str:
+    """Check that the command is available (i.e. in PATH)."""
+    if not shutil.which(command):
+        raise RuntimeError(
+            f"Container executable not found: {command}"
+            ". Make sure it is installed and in your PATH."
+        )
+    return command
+
+
+def prepare_container(
+    container_config: ContainerConfig,
+    check=True,
+    logger: Optional[logging.Logger] = None,
+) -> str:
+    """Build the command for container and set environment variables.
+
+    Parameters
+    ----------
+    container_config : ContainerConfig
+        Config object
+    check : bool, optional
+        Whether to validate config components and modify them
+        if needed, by default True
+    logger : Optional[logging.Logger], optional
+        Logger, by default None
+
+    Returns
+    -------
+    str
+        The command string
+    """
+    command = container_config.COMMAND
+    subcommand = container_config.SUBCOMMAND
+    args = container_config.ARGS
+    env_vars = container_config.ENV_VARS
+
+    if check:
+        command = check_container_command(command)
+        args = check_container_args(args, logger=logger)
+
+    set_container_env_vars(env_vars, logger=logger)
+
+    return shlex.join([command, subcommand] + args)
+
+
+def set_container_env_vars(
+    env_vars: dict[str, str], logger: Optional[logging.Logger] = None
+) -> None:
+    """Set environment variables for the container."""
+    if logger is None:
+        logger = get_logger("set_container_env_vars")
+    for var, value in env_vars.items():
+        for prefix in APPTAINER_ENVVAR_PREFIXES:
+            var_with_prefix = f"{prefix}{var}"
+            logger.info(f"Setting environment variable: {var_with_prefix}={value}")
+            os.environ[var_with_prefix] = value
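Putting these helpers together: `add_bind_path` resolves the local path and appends an Apptainer `--bind` flag, and `prepare_container` assembles the command prefix. A sketch with illustrative paths (`check=False` skips the executable and bind-path checks, so this runs without Apptainer installed):

```python
from nipoppy.config.container import ContainerConfig, prepare_container

config = ContainerConfig()
config.add_bind_path("/data/my_dataset", "/dataset", mode="ro")
print(prepare_container(config, check=False))
# apptainer run --bind /data/my_dataset:/dataset:ro
```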
diff --git a/nipoppy_cli/nipoppy/config/main.py b/nipoppy_cli/nipoppy/config/main.py
new file mode 100644
index 00000000..7de6d033
--- /dev/null
+++ b/nipoppy_cli/nipoppy/config/main.py
@@ -0,0 +1,117 @@
+"""Dataset configuration."""
+
+from pathlib import Path
+from typing import Any, Self
+
+from pydantic import ConfigDict, model_validator
+
+from nipoppy.config.container import ModelWithContainerConfig
+from nipoppy.config.pipeline import PipelineConfig
+from nipoppy.utils import check_session, load_json
+
+
+class Config(ModelWithContainerConfig):
+    """Model for dataset configuration."""
+
+    DATASET_NAME: str
+    VISITS: list[str]
+    SESSIONS: list[str] = []
+    BIDS: dict[str, dict[str, dict[str, PipelineConfig]]] = {}
+    PROC_PIPELINES: dict[str, dict[str, PipelineConfig]]
+
+    model_config = ConfigDict(extra="allow")
+
+    def _check_no_duplicate_pipeline(self) -> Self:
+        """Check that BIDS and PROC_PIPELINES do not have common pipelines."""
+        bids_pipelines = set(self.BIDS.keys())
+        proc_pipelines = set(self.PROC_PIPELINES.keys())
+        if len(bids_pipelines & proc_pipelines) != 0:
+            raise ValueError(
+                "Cannot have the same pipeline under BIDS and PROC_PIPELINES"
+                f", got {bids_pipelines} and {proc_pipelines}"
+            )
+
+    def _propagate_container_config(self) -> Self:
+        """Propagate the container config to all pipelines."""
+
+        def _propagate(pipeline_or_pipeline_dicts: dict | PipelineConfig):
+            if isinstance(pipeline_or_pipeline_dicts, PipelineConfig):
+                pipeline_config = pipeline_or_pipeline_dicts
+                container_config = pipeline_config.get_container_config()
+                if container_config.INHERIT:
+                    container_config.merge_args_and_env_vars(self.CONTAINER_CONFIG)
+            else:
+                for (
+                    child_pipeline_or_pipeline_dicts
+                ) in pipeline_or_pipeline_dicts.values():
+                    _propagate(child_pipeline_or_pipeline_dicts)
+
+        _propagate(self.BIDS)
+        _propagate(self.PROC_PIPELINES)
+
+        return self
+
+    @model_validator(mode="before")
+    @classmethod
+    def check_input(cls, data: Any):
+        """Validate the raw input."""
+        key_sessions = "SESSIONS"
+        key_visits = "VISITS"
+        if isinstance(data, dict):
+            # if sessions are not given, infer from visits
+            if key_sessions not in data:
+                data[key_sessions] = [
+                    check_session(visit) for visit in data[key_visits]
+                ]
+
+        return data
+
+    @model_validator(mode="after")
+    def validate_and_process(self) -> Self:
+        """Validate and process the configuration."""
+        self._check_no_duplicate_pipeline()
+        self._propagate_container_config()
+        return self
+
+    def get_pipeline_config(
+        self,
+        pipeline_name: str,
+        pipeline_version: str,
+    ) -> PipelineConfig:
+        """Get the config for a pipeline."""
+        try:
+            return self.PROC_PIPELINES[pipeline_name][pipeline_version]
+        except KeyError:
+            raise ValueError(f"No config found for {pipeline_name} {pipeline_version}")
+
+    def get_bids_pipeline_config(
+        self, pipeline_name: str, pipeline_version: str, pipeline_step: str
+    ) -> PipelineConfig:
+        """Get the config for a BIDS conversion pipeline."""
+        try:
+            return self.BIDS[pipeline_name][pipeline_version][pipeline_step]
+        except KeyError:
+            raise ValueError(
+                "No config found for "
+                f"{pipeline_name} {pipeline_version} {pipeline_step}"
+            )
+
+    def save(self, fpath: str | Path, **kwargs):
+        """Save the config to a JSON file.
+
+        Parameters
+        ----------
+        fpath : str | Path
+            Path to the JSON file to write
+        """
+        fpath = Path(fpath)
+        if "indent" not in kwargs:
+            kwargs["indent"] = 4
+        fpath.parent.mkdir(parents=True, exist_ok=True)
+        with open(fpath, "w") as file:
+            file.write(self.model_dump_json(**kwargs))
+
+    @classmethod
+    def load(cls, path: str | Path) -> Self:
+        """Load a dataset configuration."""
+        return cls(**load_json(path))
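To make the validators above concrete: `SESSIONS` is inferred from `VISITS` when omitted, and the global `CONTAINER_CONFIG` is merged into each pipeline's container config when `INHERIT` is enabled. A minimal sketch, assuming `check_session` adds the BIDS `ses-` prefix:

```python
from nipoppy.config.main import Config

config = Config(
    DATASET_NAME="my_dataset",
    VISITS=["BL", "M12"],
    PROC_PIPELINES={"fmriprep": {"20.2.7": {}}},
    CONTAINER_CONFIG={"ENV_VARS": {"TEMPLATEFLOW_HOME": "/templateflow"}},
)
print(config.SESSIONS)  # ["ses-BL", "ses-M12"], inferred from VISITS
pipeline_config = config.get_pipeline_config("fmriprep", "20.2.7")
print(pipeline_config.get_container_config().ENV_VARS)  # inherited from the top level
```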
+ """ + field_pairs = [ + ("DESCRIPTOR", "DESCRIPTOR_FILE"), + ("INVOCATION", "INVOCATION_FILE"), + ] + for field_json, field_file in field_pairs: + value_json = getattr(self, field_json) + value_file = getattr(self, field_file) + if value_json is not None and value_file is not None: + raise ValueError( + f"Cannot specify both {field_json} and {field_file}" + f". Got {value_json} and {value_file} respectively." + ) + + if self.INVOCATION is None and self.INVOCATION_FILE is None: + self.INVOCATION = {} + + return self + + def get_container(self) -> Path: + """Return the path to the pipeline's container.""" + if self.CONTAINER is None: + raise RuntimeError("No container specified for the pipeline") + return self.CONTAINER + + def add_pybids_ignore_patterns( + self, + patterns: Sequence[str | re.Pattern] | str | re.Pattern, + ): + """Add pattern(s) to ignore for PyBIDS.""" + if isinstance(patterns, (str, re.Pattern)): + patterns = [patterns] + for pattern in patterns: + if isinstance(pattern, str): + pattern = re.compile(pattern) + if pattern not in self.PYBIDS_IGNORE: + self.PYBIDS_IGNORE.append(pattern) diff --git a/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-convert.json b/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-convert.json new file mode 100644 index 00000000..525c0a61 --- /dev/null +++ b/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-convert.json @@ -0,0 +1,138 @@ +{ + "name": "dcm2bids", + "description": "dcm2bids", + "tool-version": "3.1.1", + "schema-version": "0.5", + "command-line": "[[NIPOPPY_CONTAINER_COMMAND]] [[NIPOPPY_FPATH_CONTAINER]] dcm2bids [DICOM_DIR] [PARTICIPANT] [SESSION] [CONFIG] [OUTPUT_DIR] [AUTO_EXTRACT_ENTITIES] [BIDS_VALIDATE] [FORCE_DCM2BIDS] [SKIP_DCM2NIIX] [CLOBBER] [LOG_LEVEL] [VERSION]", + "inputs": [ + { + "name": "dicom_dir", + "id": "dicom_dir", + "description": "DICOM directory(ies) or archive(s) (tar, tar.bz2, tar.gz or zip).", + "type": "String", + "list": true, + "optional": false, + "command-line-flag": "-d", + "value-key": "[DICOM_DIR]", + "default-value": [ + "[[NIPOPPY_DPATH_SOURCEDATA]]/[[NIPOPPY_SESSION]]/[[NIPOPPY_PARTICIPANT]]" + ] + }, + { + "name": "participant", + "id": "participant", + "description": "Participant ID.", + "type": "String", + "optional": false, + "command-line-flag": "-p", + "value-key": "[PARTICIPANT]", + "default-value": "[[NIPOPPY_PARTICIPANT]]" + }, + { + "name": "session", + "id": "session", + "description": "Session ID.", + "type": "String", + "optional": true, + "command-line-flag": "-s", + "value-key": "[SESSION]", + "default-value": "[[NIPOPPY_SESSION_SHORT]]" + }, + { + "name": "config", + "id": "config", + "description": "JSON configuration file (see example/config.json).", + "type": "String", + "optional": false, + "command-line-flag": "-c", + "value-key": "[CONFIG]" + }, + { + "name": "output_dir", + "id": "output_dir", + "description": "Output BIDS directory. [%(default)s]", + "type": "String", + "optional": true, + "command-line-flag": "-o", + "value-key": "[OUTPUT_DIR]", + "default-value": "[[NIPOPPY_DPATH_BIDS]]" + }, + { + "name": "auto_extract_entities", + "id": "auto_extract_entities", + "description": "If set, it will automatically try to extract entityinformation [task, dir, echo] based on the suffix and datatype. 
[%(default)s]", + "type": "Flag", + "optional": true, + "command-line-flag": "--auto_extract_entities", + "value-key": "[AUTO_EXTRACT_ENTITIES]" + }, + { + "name": "bids_validate", + "id": "bids_validate", + "description": "If set, once your conversion is done it will check if your output folder is BIDS valid. [%(default)s]\nbids-validator needs to be installed check: https://github.com/bids-standard/bids-validator#quickstart", + "type": "Flag", + "optional": true, + "command-line-flag": "--bids_validate", + "value-key": "[BIDS_VALIDATE]" + }, + { + "name": "force_dcm2bids", + "id": "force_dcm2bids", + "description": "Overwrite previous temporary dcm2bids output if it exists.", + "type": "Flag", + "optional": true, + "command-line-flag": "--force_dcm2bids", + "value-key": "[FORCE_DCM2BIDS]" + }, + { + "name": "skip_dcm2niix", + "id": "skip_dcm2niix", + "description": "Skip dcm2niix conversion. Option -d should contains NIFTI and json files.", + "type": "Flag", + "optional": true, + "command-line-flag": "--skip_dcm2niix", + "value-key": "[SKIP_DCM2NIIX]" + }, + { + "name": "clobber", + "id": "clobber", + "description": "Overwrite output if it exists.", + "type": "Flag", + "optional": true, + "command-line-flag": "--clobber", + "value-key": "[CLOBBER]" + }, + { + "name": "log_level", + "id": "log_level", + "description": "Set logging level to the console. [%(default)s]", + "type": "String", + "optional": true, + "default-value": "INFO", + "command-line-flag": "-l", + "value-key": "[LOG_LEVEL]", + "value-choices": [ + "DEBUG", + "INFO", + "WARNING", + "ERROR", + "CRITICAL" + ] + }, + { + "name": "version", + "id": "version", + "description": "Report dcm2bids version and the BIDS version.", + "type": "String", + "optional": true, + "command-line-flag": "-v", + "value-key": "[VERSION]" + } + ], + "tags": {}, + "suggested-resources": { + "cpu-cores": 1, + "ram": 1, + "walltime-estimate": 60 + } +} diff --git a/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-prepare.json b/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-prepare.json new file mode 100644 index 00000000..dc3e69ca --- /dev/null +++ b/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-prepare.json @@ -0,0 +1,83 @@ +{ + "name": "dcm2bids_helper", + "description": "dcm2bids_helper", + "tool-version": "3.1.0", + "schema-version": "0.5", + "command-line": "[[NIPOPPY_CONTAINER_COMMAND]] [[NIPOPPY_FPATH_CONTAINER]] dcm2bids_helper [DICOM_DIR] [OUTPUT_DIR] [NEST] [OVERWRITE] [LOG_LEVEL]", + "inputs": [ + { + "name": "dicom_dir", + "id": "dicom_dir", + "description": "DICOM directory(ies) or archive(s) (tar, tar.bz2, tar.gz or zip).", + "type": "String", + "list": true, + "optional": false, + "command-line-flag": "-d", + "value-key": "[DICOM_DIR]", + "default-value": [ + "[[NIPOPPY_DPATH_SOURCEDATA]]/[[NIPOPPY_SESSION]]/[[NIPOPPY_PARTICIPANT]]" + ] + }, + { + "name": "output_dir", + "id": "output_dir", + "description": "Output directory.", + "type": "String", + "optional": true, + "command-line-flag": "-o", + "value-key": "[OUTPUT_DIR]", + "default-value": "[[NIPOPPY_DPATH_SCRATCH]]/dcm2bids_helper" + }, + { + "name": "nest", + "id": "nest", + "description": "Nest a directory in . 
diff --git a/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-prepare.json b/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-prepare.json
new file mode 100644
index 00000000..dc3e69ca
--- /dev/null
+++ b/nipoppy_cli/nipoppy/data/descriptors/dcm2bids-3.1.0-prepare.json
@@ -0,0 +1,83 @@
+{
+    "name": "dcm2bids_helper",
+    "description": "dcm2bids_helper",
+    "tool-version": "3.1.0",
+    "schema-version": "0.5",
+    "command-line": "[[NIPOPPY_CONTAINER_COMMAND]] [[NIPOPPY_FPATH_CONTAINER]] dcm2bids_helper [DICOM_DIR] [OUTPUT_DIR] [NEST] [OVERWRITE] [LOG_LEVEL]",
+    "inputs": [
+        {
+            "name": "dicom_dir",
+            "id": "dicom_dir",
+            "description": "DICOM directory(ies) or archive(s) (tar, tar.bz2, tar.gz or zip).",
+            "type": "String",
+            "list": true,
+            "optional": false,
+            "command-line-flag": "-d",
+            "value-key": "[DICOM_DIR]",
+            "default-value": [
+                "[[NIPOPPY_DPATH_SOURCEDATA]]/[[NIPOPPY_SESSION]]/[[NIPOPPY_PARTICIPANT]]"
+            ]
+        },
+        {
+            "name": "output_dir",
+            "id": "output_dir",
+            "description": "Output directory.",
+            "type": "String",
+            "optional": true,
+            "command-line-flag": "-o",
+            "value-key": "[OUTPUT_DIR]",
+            "default-value": "[[NIPOPPY_DPATH_SCRATCH]]/dcm2bids_helper"
+        },
+        {
+            "name": "nest",
+            "id": "nest",
+            "description": "Nest a directory in <output_dir>. Useful if many helper runs are needed\nto make a config file due to slight variations in MRI acquisitions.\nDefaults to DICOM_DIR if no name is provided.\n(Default: [%(default)s])",
+            "type": "String",
+            "optional": true,
+            "command-line-flag": "-n",
+            "value-key": "[NEST]"
+        },
+        {
+            "name": "overwrite",
+            "id": "overwrite",
+            "description": "Force command to overwrite existing output files.",
+            "type": "Flag",
+            "optional": true,
+            "command-line-flag": "--force",
+            "value-key": "[OVERWRITE]"
+        },
+        {
+            "name": "log_level",
+            "id": "log_level",
+            "description": "Set logging level to the console. [%(default)s]",
+            "type": "String",
+            "optional": true,
+            "default-value": "INFO",
+            "command-line-flag": "-l",
+            "value-key": "[LOG_LEVEL]",
+            "value-choices": [
+                "DEBUG",
+                "INFO",
+                "WARNING",
+                "ERROR",
+                "CRITICAL"
+            ]
+        }
+    ],
+    "tags": {},
+    "suggested-resources": {
+        "cpu-cores": 1,
+        "ram": 1,
+        "walltime-estimate": 60
+    },
+    "custom": {
+        "nipoppy": {
+            "CONTAINER_CONFIG": {
+                "ARGS": [
+                    "--bind",
+                    "[[NIPOPPY_DPATH_SCRATCH]]/dcm2bids_helper"
+                ]
+            }
+        }
+    }
+}
"command-line-flag": "--version" + }, + { + "id": "skip_bids_validation", + "name": "skip_bids_validation", + "description": "assume the input dataset is BIDS compliant and skip the validation", + "optional": true, + "type": "Flag", + "value-key": "[SKIP_BIDS_VALIDATION]", + "command-line-flag": "--skip_bids_validation" + }, + { + "id": "participant_label", + "name": "participant_label", + "description": "a space delimited list of participant identifiers or a single identifier (the sub- prefix can be removed)", + "optional": true, + "type": "String", + "value-key": "[PARTICIPANT_LABEL]", + "list": true, + "command-line-flag": "--participant-label", + "default-value": [ + "[[NIPOPPY_PARTICIPANT]]" + ] + }, + { + "id": "task_id", + "name": "task_id", + "description": "select a specific task to be processed", + "optional": true, + "type": "String", + "value-key": "[TASK_ID]", + "command-line-flag": "-t" + }, + { + "id": "echo_idx", + "name": "echo_idx", + "description": "select a specific echo to be processed in a multiecho series", + "optional": true, + "type": "Number", + "value-key": "[ECHO_IDX]", + "command-line-flag": "--echo-idx" + }, + { + "id": "bids_filters", + "name": "bids_filters", + "description": "a JSON file describing custom BIDS input filters using PyBIDS. For further details, please check out https://fmriprep.readthedocs.io/en/20.2.7/faq.html#how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep", + "optional": true, + "type": "String", + "value-key": "[BIDS_FILTERS]", + "command-line-flag": "--bids-filter-file" + }, + { + "id": "anat_derivatives", + "name": "anat_derivatives", + "description": "Reuse the anatomical derivatives from another fMRIPrep run or calculated with an alternative processing tool (NOT RECOMMENDED).", + "optional": true, + "type": "String", + "value-key": "[ANAT_DERIVATIVES]", + "command-line-flag": "--anat-derivatives" + }, + { + "id": "bids_database_dir", + "name": "bids_database_dir", + "description": "Path to a PyBIDS database folder, for faster indexing (especially useful for large datasets). 
Will be created if not present.", + "optional": true, + "type": "String", + "value-key": "[BIDS_DATABASE_DIR]", + "command-line-flag": "--bids-database-dir", + "default-value": "[[NIPOPPY_DPATH_PIPELINE_BIDS_DB]]" + }, + { + "id": "nprocs", + "name": "nprocs", + "description": "maximum number of threads across all processes", + "optional": true, + "type": "String", + "value-key": "[NPROCS]", + "command-line-flag": "--nprocs" + }, + { + "id": "omp_nthreads", + "name": "omp_nthreads", + "description": "maximum number of threads per-process", + "optional": true, + "type": "String", + "value-key": "[OMP_NTHREADS]", + "command-line-flag": "--omp-nthreads" + }, + { + "id": "memory_gb", + "name": "memory_gb", + "description": "upper bound memory limit for fMRIPrep processes", + "optional": true, + "type": "String", + "value-key": "[MEMORY_GB]", + "command-line-flag": "--mem" + }, + { + "id": "low_mem", + "name": "low_mem", + "description": "attempt to reduce memory usage (will increase disk usage in working directory)", + "optional": true, + "type": "Flag", + "value-key": "[LOW_MEM]", + "command-line-flag": "--low-mem" + }, + { + "id": "use_plugin", + "name": "use_plugin", + "description": "nipype plugin configuration file", + "optional": true, + "type": "String", + "value-key": "[USE_PLUGIN]", + "command-line-flag": "--use-plugin" + }, + { + "id": "anat_only", + "name": "anat_only", + "description": "run anatomical workflows only", + "optional": true, + "type": "Flag", + "value-key": "[ANAT_ONLY]", + "command-line-flag": "--anat-only" + }, + { + "id": "boilerplate_only", + "name": "boilerplate_only", + "description": "generate boilerplate only", + "optional": true, + "type": "Flag", + "value-key": "[BOILERPLATE_ONLY]", + "command-line-flag": "--boilerplate_only" + }, + { + "id": "md_only_boilerplate", + "name": "md_only_boilerplate", + "description": "skip generation of HTML and LaTeX formatted citation with pandoc", + "optional": true, + "type": "Flag", + "value-key": "[MD_ONLY_BOILERPLATE]", + "command-line-flag": "--md-only-boilerplate" + }, + { + "id": "aroma_err_on_warn", + "name": "aroma_err_on_warn", + "description": "Raise an error if ICA_AROMA does not produce sensible output (e.g., if all the components are classified as signal or noise)", + "optional": true, + "type": "Flag", + "value-key": "[AROMA_ERR_ON_WARN]", + "command-line-flag": "--error-on-aroma-warnings" + }, + { + "id": "verbose_count", + "name": "verbose_count", + "description": "increases log verbosity for each occurrence, debug level is -vvv", + "optional": true, + "type": "String", + "value-key": "[VERBOSE_COUNT]", + "value-choices": [ + "-v", + "-vv", + "-vvv" + ] + }, + { + "id": "ignore", + "name": "ignore", + "description": "ignore selected aspects of the input dataset to disable corresponding parts of the workflow (a space delimited list)", + "optional": true, + "type": "String", + "value-key": "[IGNORE]", + "list": true, + "value-choices": [ + "fieldmaps", + "slicetiming", + "sbref", + "t2w", + "flair" + ], + "command-line-flag": "--ignore" + }, + { + "id": "longitudinal", + "name": "longitudinal", + "description": "treat dataset as longitudinal - may increase runtime", + "optional": true, + "type": "Flag", + "value-key": "[LONGITUDINAL]", + "command-line-flag": "--longitudinal" + }, + { + "id": "output_spaces", + "name": "output_spaces", + "description": "Standard and non-standard spaces to resample anatomical and functional images to. Standard spaces may be specified by the form ``[:cohort-