diff --git a/.flake8 b/.flake8
index 68ddec59..75f847cc 100644
--- a/.flake8
+++ b/.flake8
@@ -8,7 +8,7 @@ exclude =
     venv,
 per-file-ignores =
     # - docstrings rules that should not be applied to tests
-    **/test_*: D100, D103, D104
+    **/test_*: D100, D101, D103, D104
     __init__.py:F401
     # allow "weird indentation"
     tests/test_workflow_*.py: D100, D103, D104, E131, E127, E501
diff --git a/.github/workflows/run_tests_new.yml b/.github/workflows/run_tests_new.yml
index 77e28e91..4189e43a 100644
--- a/.github/workflows/run_tests_new.yml
+++ b/.github/workflows/run_tests_new.yml
@@ -18,6 +18,10 @@ jobs:

     runs-on: ubuntu-latest

+    strategy:
+      matrix:
+        python-version: ["3.9", "3.10", "3.11", "3.12"]
+
     # only trigger on upstream repo
     if: github.repository_owner == 'neurodatascience' && github.event.repository.name == 'nipoppy'

@@ -29,7 +33,7 @@ jobs:

     - uses: actions/setup-python@v4
       with:
-        python-version: '3.11'
+        python-version: ${{ matrix.python-version }}

     - name: Install package
       run: |
diff --git a/nipoppy_cli/docs/source/conf.py b/nipoppy_cli/docs/source/conf.py
index 3620d600..962e6955 100644
--- a/nipoppy_cli/docs/source/conf.py
+++ b/nipoppy_cli/docs/source/conf.py
@@ -86,6 +86,9 @@
     ("py:class", "argparse.HelpFormatter"),
     ("py:class", "argparse._SubParsersAction"),
     ("py:class", "argparse._ActionsContainer"),
+    ("py:class", "StrOrPathLike"),
+    ("py:class", "nipoppy.utils.StrOrPathLike"),
+    ("py:class", "typing_extensions.Self"),
 ]

 # -- Copybutton configuration -------------------------------------------------
diff --git a/nipoppy_cli/nipoppy/config/container.py b/nipoppy_cli/nipoppy/config/container.py
index e9f813c9..7535b207 100644
--- a/nipoppy_cli/nipoppy/config/container.py
+++ b/nipoppy_cli/nipoppy/config/container.py
@@ -11,6 +11,7 @@
 from pydantic import BaseModel, ConfigDict, Field

 from nipoppy.logger import get_logger
+from nipoppy.utils import StrOrPathLike

 # Apptainer
 APPTAINER_BIND_FLAG = "--bind"
@@ -53,8 +54,8 @@ class ContainerConfig(BaseModel):

     def add_bind_path(
         self,
-        path_local: str | Path,
-        path_inside_container: Optional[str | Path] = None,
+        path_local: StrOrPathLike,
+        path_inside_container: Optional[StrOrPathLike] = None,
         mode: str = "rw",
     ):
         """Add a bind path."""
@@ -100,8 +101,8 @@ def get_container_config(self) -> ContainerConfig:

 def add_bind_path_to_args(
     args: list[str],
-    path_local: str | Path,
-    path_inside_container: Optional[str | Path] = None,
+    path_local: StrOrPathLike,
+    path_inside_container: Optional[StrOrPathLike] = None,
     mode: Optional[str] = "rw",
 ):
     """Add a bind path to the container arguments.
@@ -110,10 +111,10 @@ def add_bind_path_to_args(
     ----------
     args : list[str]
         Existing arguments
-    path_local : str | Path
+    path_local : nipoppy.utils.StrOrPathLike
         Path on disk. If this is a relative path or contains symlinks,
         it will be resolved
-    path_inside_container : Optional[str | Path], optional
+    path_inside_container : Optional[nipoppy.utils.StrOrPathLike], optional
         Path inside the container (if None, will be the same as the local
         path), by default None
     mode : str, optional
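A hedged usage sketch for the retyped bind-path helper above. The argument values are hypothetical, and whether add_bind_path_to_args mutates or returns the argument list is not shown in this hunk:

from pathlib import Path

from nipoppy.config.container import add_bind_path_to_args

# Hypothetical pre-existing container arguments.
args = ["--cleanenv"]

# With the StrOrPathLike annotations, str and Path are interchangeable for
# both the local path and the in-container path.
add_bind_path_to_args(args, "/data/raw")
add_bind_path_to_args(args, Path("/data/raw"), Path("/inside/raw"), mode="rw")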
diff --git a/nipoppy_cli/nipoppy/config/main.py b/nipoppy_cli/nipoppy/config/main.py
index 0836c12f..007667b0 100644
--- a/nipoppy_cli/nipoppy/config/main.py
+++ b/nipoppy_cli/nipoppy/config/main.py
@@ -1,13 +1,16 @@
 """Dataset configuration."""

+from __future__ import annotations
+
 from pathlib import Path
-from typing import Any, Optional, Self
+from typing import Any, Optional

 from pydantic import ConfigDict, Field, model_validator
+from typing_extensions import Self

 from nipoppy.config.container import ModelWithContainerConfig
 from nipoppy.config.pipeline import PipelineConfig
-from nipoppy.utils import check_session, load_json
+from nipoppy.utils import StrOrPathLike, check_session, load_json


 class Config(ModelWithContainerConfig):
@@ -106,12 +109,12 @@ def get_bids_pipeline_config(
             f"{pipeline_name} {pipeline_version} {pipeline_step}"
         )

-    def save(self, fpath: str | Path, **kwargs):
+    def save(self, fpath: StrOrPathLike, **kwargs):
         """Save the config to a JSON file.

         Parameters
         ----------
-        fpath : str | Path
+        fpath : nipoppy.utils.StrOrPathLike
             Path to the JSON file to write
         """
         fpath = Path(fpath)
@@ -122,6 +125,6 @@ def save(self, fpath: str | Path, **kwargs):
             file.write(self.model_dump_json(**kwargs))

     @classmethod
-    def load(cls, path: str | Path) -> Self:
+    def load(cls, path: StrOrPathLike) -> Self:
         """Load a dataset configuration."""
         return cls(**load_json(path))
diff --git a/nipoppy_cli/nipoppy/config/pipeline.py b/nipoppy_cli/nipoppy/config/pipeline.py
index 6bcf68ee..b08c07da 100644
--- a/nipoppy_cli/nipoppy/config/pipeline.py
+++ b/nipoppy_cli/nipoppy/config/pipeline.py
@@ -1,5 +1,7 @@
 """Pipeline configuration."""

+from __future__ import annotations
+
 import re
 from pathlib import Path
 from typing import Optional, Sequence
diff --git a/nipoppy_cli/nipoppy/layout.py b/nipoppy_cli/nipoppy/layout.py
index 2bda1169..05e13b23 100644
--- a/nipoppy_cli/nipoppy/layout.py
+++ b/nipoppy_cli/nipoppy/layout.py
@@ -7,7 +7,12 @@
 from pydantic import BaseModel, ConfigDict, Field

 from nipoppy.base import Base
-from nipoppy.utils import FPATH_DEFAULT_LAYOUT, get_pipeline_tag, load_json
+from nipoppy.utils import (
+    FPATH_DEFAULT_LAYOUT,
+    StrOrPathLike,
+    get_pipeline_tag,
+    load_json,
+)


 class PathInfo(BaseModel):
@@ -129,17 +134,24 @@ class DatasetLayout(Base):
     """File/directory structure for a specific dataset."""

     def __init__(
-        self, dpath_root: Path | str, fpath_config: Optional[Path | str] = None
+        self,
+        dpath_root: StrOrPathLike,
+        fpath_config: Optional[StrOrPathLike] = None,
     ):
         """Initialize the object.

         Parameters
         ----------
-        dataset_root: Path | str
+        dpath_root : nipoppy.utils.StrOrPathLike
             Path to the root directory of the dataset.
-        fpath_config: Path | str | None
-            Path to layout config to use, by default None.
+        fpath_config : Optional[nipoppy.utils.StrOrPathLike], optional
+            Path to the layout config to use, by default None.
             If None, the default layout will be used.
+
+        Raises
+        ------
+        FileNotFoundError
+            If ``fpath_config`` does not exist.
         """
         # use the default layout if none is specified
         if fpath_config is None:
@@ -187,7 +199,7 @@ def __init__(
         self.dname_pipeline_work = "work"
         self.dname_pipeline_output = "output"

-    def get_full_path(self, path: str | Path) -> Path:
+    def get_full_path(self, path: StrOrPathLike) -> Path:
         """Build a full path from a relative path."""
         return self.dpath_root / path
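The import churn above follows a single compatibility pattern. The minimal sketch below uses an illustrative class, not one from nipoppy: postponed evaluation makes the `str | Path` annotation a plain string at runtime on Python 3.9, and Self comes from typing_extensions because typing.Self only exists from Python 3.11.

from __future__ import annotations

import json
from pathlib import Path

from typing_extensions import Self


class ExampleConfig:
    """Illustrative stand-in for the models retyped above."""

    def __init__(self, **data):
        self.data = data

    @classmethod
    def load(cls, path: str | Path) -> Self:
        # `str | Path` is never evaluated on 3.9 thanks to the __future__ import.
        return cls(**json.loads(Path(path).read_text()))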
""" # use the default layout if none is specified if fpath_config is None: @@ -187,7 +199,7 @@ def __init__( self.dname_pipeline_work = "work" self.dname_pipeline_output = "output" - def get_full_path(self, path: str | Path) -> Path: + def get_full_path(self, path: StrOrPathLike) -> Path: """Build a full path from a relative path.""" return self.dpath_root / path diff --git a/nipoppy_cli/nipoppy/logger.py b/nipoppy_cli/nipoppy/logger.py index ac4bb6b4..8285107f 100644 --- a/nipoppy_cli/nipoppy/logger.py +++ b/nipoppy_cli/nipoppy/logger.py @@ -6,6 +6,8 @@ from rich.logging import RichHandler +from nipoppy.utils import StrOrPathLike + DATE_FORMAT = "[%Y-%m-%d %X]" FORMAT_RICH = "%(message)s" FORMAT_FILE = "%(asctime)s %(levelname)-7s %(message)s" @@ -23,7 +25,7 @@ def get_logger(name: Optional[str] = None, level: int = logging.INFO) -> logging return logging.getLogger(name=name) -def add_logfile(logger: logging.Logger, fpath_log: Path | str) -> None: +def add_logfile(logger: logging.Logger, fpath_log: StrOrPathLike) -> None: """Add a file handler to the logger.""" fpath_log = Path(fpath_log) diff --git a/nipoppy_cli/nipoppy/tabular/base.py b/nipoppy_cli/nipoppy/tabular/base.py index 76416fac..2e931c2a 100644 --- a/nipoppy_cli/nipoppy/tabular/base.py +++ b/nipoppy_cli/nipoppy/tabular/base.py @@ -1,14 +1,17 @@ """Generic class for tabular data.""" +from __future__ import annotations + import contextlib from abc import ABC, abstractmethod from pathlib import Path -from typing import Any, Optional, Self, Sequence +from typing import Any, Optional, Sequence import pandas as pd from pydantic import BaseModel, ValidationError, model_validator +from typing_extensions import Self -from nipoppy.utils import save_df_with_backup +from nipoppy.utils import StrOrPathLike, save_df_with_backup class BaseTabularModel(BaseModel): @@ -77,7 +80,7 @@ def model(self) -> type[BaseTabularModel]: raise NotImplementedError("model must be assigned in subclass") @classmethod - def load(cls, fpath: str | Path, validate=True, **kwargs) -> Self: + def load(cls, fpath: StrOrPathLike, validate=True, **kwargs) -> Self: """Load (and optionally validate) a tabular data file.""" if "dtype" in kwargs: raise ValueError( @@ -190,7 +193,7 @@ def concatenate(self, other: Self, validate=True) -> Self: def save_with_backup( self, - fpath_symlink: str | Path, + fpath_symlink: StrOrPathLike, dname_backups: Optional[str] = None, use_relative_path=True, sort=True, diff --git a/nipoppy_cli/nipoppy/tabular/doughnut.py b/nipoppy_cli/nipoppy/tabular/doughnut.py index e70e0817..928898d1 100644 --- a/nipoppy_cli/nipoppy/tabular/doughnut.py +++ b/nipoppy_cli/nipoppy/tabular/doughnut.py @@ -1,15 +1,19 @@ """Class for the doughnut file.""" +from __future__ import annotations + import logging from pathlib import Path -from typing import Optional, Self +from typing import Optional from pydantic import Field +from typing_extensions import Self from nipoppy.logger import get_logger from nipoppy.tabular.manifest import Manifest, ManifestModel from nipoppy.utils import ( FIELD_DESCRIPTION_MAP, + StrOrPathLike, participant_id_to_bids_id, participant_id_to_dicom_id, ) @@ -145,9 +149,9 @@ def get_bidsified_participants_sessions( def generate_doughnut( manifest: Manifest, - dpath_downloaded: Optional[str | Path] = None, - dpath_organized: Optional[str | Path] = None, - dpath_bidsified: Optional[str | Path] = None, + dpath_downloaded: Optional[StrOrPathLike] = None, + dpath_organized: Optional[StrOrPathLike] = None, + dpath_bidsified: Optional[StrOrPathLike] 
diff --git a/nipoppy_cli/nipoppy/tabular/doughnut.py b/nipoppy_cli/nipoppy/tabular/doughnut.py
index e70e0817..928898d1 100644
--- a/nipoppy_cli/nipoppy/tabular/doughnut.py
+++ b/nipoppy_cli/nipoppy/tabular/doughnut.py
@@ -1,15 +1,19 @@
 """Class for the doughnut file."""

+from __future__ import annotations
+
 import logging
 from pathlib import Path
-from typing import Optional, Self
+from typing import Optional

 from pydantic import Field
+from typing_extensions import Self

 from nipoppy.logger import get_logger
 from nipoppy.tabular.manifest import Manifest, ManifestModel
 from nipoppy.utils import (
     FIELD_DESCRIPTION_MAP,
+    StrOrPathLike,
     participant_id_to_bids_id,
     participant_id_to_dicom_id,
 )
@@ -145,9 +149,9 @@ def get_bidsified_participants_sessions(

 def generate_doughnut(
     manifest: Manifest,
-    dpath_downloaded: Optional[str | Path] = None,
-    dpath_organized: Optional[str | Path] = None,
-    dpath_bidsified: Optional[str | Path] = None,
+    dpath_downloaded: Optional[StrOrPathLike] = None,
+    dpath_organized: Optional[StrOrPathLike] = None,
+    dpath_bidsified: Optional[StrOrPathLike] = None,
     empty=False,
     logger: Optional[logging.Logger] = None,
     # TODO allow custom map from participant_id to participant_dicom_dir
@@ -155,7 +159,7 @@
     """Generate a doughnut object."""

     def check_status(
-        dpath: Optional[str | Path],
+        dpath: Optional[StrOrPathLike],
         participant_dname: str,
         session: str,
         session_first=False,
@@ -242,9 +246,9 @@ def check_status(
 def update_doughnut(
     doughnut: Doughnut,
     manifest: Manifest,
-    dpath_downloaded: Optional[str | Path] = None,
-    dpath_organized: Optional[str | Path] = None,
-    dpath_bidsified: Optional[str | Path] = None,
+    dpath_downloaded: Optional[StrOrPathLike] = None,
+    dpath_organized: Optional[StrOrPathLike] = None,
+    dpath_bidsified: Optional[StrOrPathLike] = None,
     empty=False,
     logger: Optional[logging.Logger] = None,
 ) -> Doughnut:
diff --git a/nipoppy_cli/nipoppy/tabular/manifest.py b/nipoppy_cli/nipoppy/tabular/manifest.py
index 590ed297..65e89a8f 100644
--- a/nipoppy_cli/nipoppy/tabular/manifest.py
+++ b/nipoppy_cli/nipoppy/tabular/manifest.py
@@ -1,9 +1,12 @@
 """Class for the dataset manifest."""

-from typing import Optional, Self
+from __future__ import annotations
+
+from typing import Optional

 import pandas as pd
 from pydantic import ConfigDict, Field
+from typing_extensions import Self

 from nipoppy.tabular.base import BaseTabular, BaseTabularModel
 from nipoppy.utils import FIELD_DESCRIPTION_MAP
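A hedged sketch of how the retyped doughnut helpers accept either path flavour; the file and directory names are made up for illustration and are not taken from this diff:

from pathlib import Path

from nipoppy.tabular.doughnut import generate_doughnut
from nipoppy.tabular.manifest import Manifest

# Manifest.load is inherited from BaseTabular.load (retyped above), so a
# plain string works; the dpath_* arguments can mix str and Path freely.
manifest = Manifest.load("tabular/manifest.csv")
doughnut = generate_doughnut(
    manifest,
    dpath_downloaded="scratch/raw_dicom",
    dpath_organized=Path("scratch/dicom"),
    dpath_bidsified=Path("bids"),
    empty=False,
)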
diff --git a/nipoppy_cli/nipoppy/utils.py b/nipoppy_cli/nipoppy/utils.py
index 085df8e9..46941535 100644
--- a/nipoppy_cli/nipoppy/utils.py
+++ b/nipoppy_cli/nipoppy/utils.py
@@ -1,15 +1,19 @@
 """Utility functions."""

+from __future__ import annotations
+
 import datetime
 import json
 import os
 import re
 from pathlib import Path
-from typing import Optional
+from typing import Optional, TypeVar

 import bids
 import pandas as pd

+StrOrPathLike = TypeVar("StrOrPathLike", str, os.PathLike)
+
 # BIDS
 BIDS_SUBJECT_PREFIX = "sub-"
 BIDS_SESSION_PREFIX = "ses-"
@@ -85,8 +89,8 @@ def strip_session(session: Optional[str]):


 def create_bids_db(
-    dpath_bids: Path | str,
-    dpath_bids_db: Optional[Path | str] = None,
+    dpath_bids: StrOrPathLike,
+    dpath_bids_db: Optional[StrOrPathLike] = None,
     validate=False,
     reset_database=True,
     ignore_patterns: Optional[list[str | re.Pattern] | str | re.Pattern] = None,
@@ -133,12 +137,12 @@ def get_pipeline_tag(
     return sep.join(components)


-def load_json(fpath: str | Path, **kwargs) -> dict:
+def load_json(fpath: StrOrPathLike, **kwargs) -> dict:
     """Load a JSON file.

     Parameters
     ----------
-    fpath : str | Path
+    fpath : nipoppy.utils.StrOrPathLike
         Path to the JSON file
     **kwargs :
         Keyword arguments to pass to json.load
@@ -152,14 +156,14 @@
         return json.load(file, **kwargs)


-def save_json(obj: dict, fpath: str | Path, **kwargs):
+def save_json(obj: dict, fpath: StrOrPathLike, **kwargs):
     """Save a JSON object to a file.

     Parameters
     ----------
     obj : dict
         The JSON object
-    fpath : str | Path
+    fpath : nipoppy.utils.StrOrPathLike
         Path to the JSON file to write
     indent : int, optional
         Indentation level, by default 4
@@ -174,14 +178,14 @@
         json.dump(obj, file, **kwargs)


-def add_path_suffix(path: Path | str, suffix: str, sep="-") -> Path:
+def add_path_suffix(path: StrOrPathLike, suffix: str, sep="-") -> Path:
     """Add a suffix to a path, before the last file extension (if any)."""
     path = Path(path)
     return Path(path.parent, f"{path.stem}{sep}{suffix}{path.suffix}")


 def add_path_timestamp(
-    path: Path | str, timestamp_format="%Y%m%d_%H%M", sep="-"
+    path: StrOrPathLike, timestamp_format="%Y%m%d_%H%M", sep="-"
 ) -> Path:
     """Add a timestamp to a path, before the last file extension (if any)."""
     timestamp = datetime.datetime.now().strftime(timestamp_format)
@@ -190,7 +194,7 @@

 def save_df_with_backup(
     df: pd.DataFrame,
-    fpath_symlink: str | Path,
+    fpath_symlink: StrOrPathLike,
     dname_backups: Optional[str] = None,
     use_relative_path=True,
     dry_run=False,
@@ -202,7 +206,7 @@
     ----------
     df : pd.DataFrame
         The dataframe to save
-    fpath_symlink : str | Path
+    fpath_symlink : nipoppy.utils.StrOrPathLike
         The path to the symlink
     dname_backups : Optional[str], optional
         The directory where the timestamped backup file should be written
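The TypeVar added to utils.py above anchors every annotation change in this patch. The standalone sketch below repeats the same declaration next to an illustrative helper (normalize is not a nipoppy function); a Union[str, os.PathLike] alias would express the same intent for plain parameters, and the new Sphinx nitpick_ignore entries cover how the TypeVar renders in the docs.

from __future__ import annotations

import os
from typing import TypeVar

# Same declaration as in nipoppy/utils.py above: constrained to str and
# os.PathLike, so annotated parameters accept either form.
StrOrPathLike = TypeVar("StrOrPathLike", str, os.PathLike)


def normalize(path: StrOrPathLike) -> str:
    """Collapse either accepted type to a plain string (illustrative only)."""
    return os.fspath(path)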
diff --git a/nipoppy_cli/nipoppy/workflows/base.py b/nipoppy_cli/nipoppy/workflows/base.py
index dea0d33a..791a67fa 100644
--- a/nipoppy_cli/nipoppy/workflows/base.py
+++ b/nipoppy_cli/nipoppy/workflows/base.py
@@ -1,5 +1,7 @@
 """Workflow utilities."""

+from __future__ import annotations
+
 import logging
 import os
 import shlex
@@ -17,7 +19,7 @@
 from nipoppy.tabular.base import BaseTabular
 from nipoppy.tabular.doughnut import Doughnut, generate_doughnut
 from nipoppy.tabular.manifest import Manifest
-from nipoppy.utils import add_path_timestamp
+from nipoppy.utils import StrOrPathLike, add_path_timestamp

 LOG_SUFFIX = ".log"
@@ -33,9 +35,9 @@ class BaseWorkflow(Base, ABC):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         name: str,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run=False,
     ):
@@ -43,7 +45,7 @@ def __init__(

         Parameters
         ----------
-        dpath_root : Path | str
+        dpath_root : nipoppy.utils.StrOrPathLike
             Path the the root directory of the dataset.
         name : str
             Name of the workflow, used for logging.
@@ -101,7 +103,7 @@ def run_command(

         Parameters
         ----------
-        command_or_args : Sequence[str] | str
+        command_or_args : Sequence[str] | str
             The command to run.
         check : bool, optional
             If True, raise an error if the process exits with a non-zero code,
@@ -111,7 +113,7 @@

         Returns
         -------
-        subprocess.Popen | str
+        subprocess.Popen or str
         """

         def process_output(
diff --git a/nipoppy_cli/nipoppy/workflows/bids_conversion.py b/nipoppy_cli/nipoppy/workflows/bids_conversion.py
index c2d38fe0..aa531e17 100644
--- a/nipoppy_cli/nipoppy/workflows/bids_conversion.py
+++ b/nipoppy_cli/nipoppy/workflows/bids_conversion.py
@@ -1,12 +1,14 @@
 """Workflow for convert command."""

+from __future__ import annotations
+
 import logging
 from functools import cached_property
 from pathlib import Path
 from typing import Optional

 from nipoppy.config.pipeline import PipelineConfig
-from nipoppy.utils import get_pipeline_tag
+from nipoppy.utils import StrOrPathLike, get_pipeline_tag
 from nipoppy.workflows.runner import PipelineRunner

@@ -15,14 +17,14 @@ class BidsConversionRunner(PipelineRunner):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         pipeline_name: str,
         pipeline_version: str,
         pipeline_step: str,
         participant: str = None,
         session: str = None,
         simulate: bool = False,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
diff --git a/nipoppy_cli/nipoppy/workflows/dataset_init.py b/nipoppy_cli/nipoppy/workflows/dataset_init.py
index 55f95fcb..6138233d 100644
--- a/nipoppy_cli/nipoppy/workflows/dataset_init.py
+++ b/nipoppy_cli/nipoppy/workflows/dataset_init.py
@@ -4,7 +4,7 @@
 from pathlib import Path
 from typing import Optional

-from nipoppy.utils import FPATH_SAMPLE_CONFIG, FPATH_SAMPLE_MANIFEST
+from nipoppy.utils import FPATH_SAMPLE_CONFIG, FPATH_SAMPLE_MANIFEST, StrOrPathLike
 from nipoppy.workflows.base import BaseWorkflow

@@ -17,7 +17,7 @@ class InitWorkflow(BaseWorkflow):
     def __init__(
         self,
         dpath_root: Path,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
diff --git a/nipoppy_cli/nipoppy/workflows/dicom_reorg.py b/nipoppy_cli/nipoppy/workflows/dicom_reorg.py
index 93aabde9..07c598f8 100644
--- a/nipoppy_cli/nipoppy/workflows/dicom_reorg.py
+++ b/nipoppy_cli/nipoppy/workflows/dicom_reorg.py
@@ -5,6 +5,7 @@
 from pathlib import Path
 from typing import Optional

+from nipoppy.utils import StrOrPathLike
 from nipoppy.workflows.base import BaseWorkflow

@@ -13,9 +14,9 @@ class DicomReorgWorkflow(BaseWorkflow):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         copy_files: bool = False,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
diff --git a/nipoppy_cli/nipoppy/workflows/doughnut.py b/nipoppy_cli/nipoppy/workflows/doughnut.py
index 001aca27..4196ee43 100644
--- a/nipoppy_cli/nipoppy/workflows/doughnut.py
+++ b/nipoppy_cli/nipoppy/workflows/doughnut.py
@@ -5,6 +5,7 @@
 from typing import Optional

 from nipoppy.tabular.doughnut import Doughnut, generate_doughnut, update_doughnut
+from nipoppy.utils import StrOrPathLike
 from nipoppy.workflows.base import BaseWorkflow

@@ -16,7 +17,7 @@ def __init__(
         dpath_root: Path,
         empty: bool = False,
         regenerate: bool = False,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
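A hedged construction sketch for the retyped workflow constructors above; the dataset path is hypothetical and dry_run is assumed to skip filesystem side effects:

from pathlib import Path

from nipoppy.workflows.dicom_reorg import DicomReorgWorkflow

# Equivalent under the new StrOrPathLike annotation for dpath_root.
workflow = DicomReorgWorkflow(dpath_root="/data/example_study", dry_run=True)
workflow = DicomReorgWorkflow(
    dpath_root=Path("/data/example_study"), copy_files=True, dry_run=True
)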
diff --git a/nipoppy_cli/nipoppy/workflows/pipeline.py b/nipoppy_cli/nipoppy/workflows/pipeline.py
index 6c37ddda..41291c10 100644
--- a/nipoppy_cli/nipoppy/workflows/pipeline.py
+++ b/nipoppy_cli/nipoppy/workflows/pipeline.py
@@ -1,5 +1,7 @@
 """Base class for pipeline workflows."""

+from __future__ import annotations
+
 import json
 import logging
 from abc import ABC, abstractmethod
@@ -19,6 +21,7 @@
     BIDS_SESSION_PREFIX,
     BIDS_SUBJECT_PREFIX,
     DPATH_DESCRIPTORS,
+    StrOrPathLike,
     check_participant,
     check_session,
     create_bids_db,
@@ -36,13 +39,13 @@ class BasePipelineWorkflow(BaseWorkflow, ABC):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         name: str,
         pipeline_name: str,
         pipeline_version: str,
         participant: str = None,
         session: str = None,
-        fpath_layout: Optional[Path | str] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run=False,
     ):
@@ -114,7 +117,9 @@ def fpath_container(self) -> Path:
             )
         return fpath_container

-    def _check_files_for_json(self, fpaths: str | Path | list[str | Path]) -> dict:
+    def _check_files_for_json(
+        self, fpaths: StrOrPathLike | list[StrOrPathLike]
+    ) -> dict:
         if isinstance(fpaths, (str, Path)):
             fpaths = [fpaths]
         for fpath in fpaths:
@@ -272,7 +277,7 @@ def get_boutiques_config(self, participant: str, session: str):

     def set_up_bids_db(
         self,
-        dpath_bids_db: Path | str,
+        dpath_bids_db: StrOrPathLike,
         participant: Optional[str] = None,
         session: Optional[str] = None,
     ) -> bids.BIDSLayout:
diff --git a/nipoppy_cli/nipoppy/workflows/runner.py b/nipoppy_cli/nipoppy/workflows/runner.py
index 63cf357a..8a22e373 100644
--- a/nipoppy_cli/nipoppy/workflows/runner.py
+++ b/nipoppy_cli/nipoppy/workflows/runner.py
@@ -1,13 +1,13 @@
 """PipelineRunner workflow."""

 import logging
-from pathlib import Path
 from typing import Optional

 from boutiques import bosh

 from nipoppy.config.boutiques import BoutiquesConfig
 from nipoppy.config.container import ContainerConfig, prepare_container
+from nipoppy.utils import StrOrPathLike
 from nipoppy.workflows.pipeline import BasePipelineWorkflow

@@ -16,13 +16,13 @@ class PipelineRunner(BasePipelineWorkflow):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         pipeline_name: str,
         pipeline_version: str,
         participant: str = None,
         session: str = None,
         simulate: bool = False,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
@@ -46,7 +46,7 @@ def process_container_config(
         self,
         participant: str,
         session: str,
-        bind_paths: Optional[list[str | Path]] = None,
+        bind_paths: Optional[list[StrOrPathLike]] = None,
     ) -> str:
         """Update container config and generate container command."""
         if bind_paths is None:
diff --git a/nipoppy_cli/nipoppy/workflows/tracker.py b/nipoppy_cli/nipoppy/workflows/tracker.py
index 2be6f17a..112a61be 100644
--- a/nipoppy_cli/nipoppy/workflows/tracker.py
+++ b/nipoppy_cli/nipoppy/workflows/tracker.py
@@ -1,10 +1,10 @@
 """PipelineTracker workflow."""

 import logging
-from pathlib import Path
 from typing import Optional

 from nipoppy.tabular.bagel import Bagel
+from nipoppy.utils import StrOrPathLike
 from nipoppy.workflows.pipeline import BasePipelineWorkflow

@@ -13,12 +13,12 @@ class PipelineTracker(BasePipelineWorkflow):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         pipeline_name: str,
         pipeline_version: str,
         participant: str = None,
         session: str = None,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
diff --git a/nipoppy_cli/pyproject.toml b/nipoppy_cli/pyproject.toml
index f37c82d5..69b86109 100644
--- a/nipoppy_cli/pyproject.toml
+++ b/nipoppy_cli/pyproject.toml
@@ -26,12 +26,13 @@ dependencies = [
     "pydantic",
     "rich",
     "rich_argparse",
+    "typing-extensions",
 ]
 description = "Standardized organization and processing of neuroimaging-clinical datasets"
 license = { file = "LICENSE" }
 name = "nipoppy"
 readme = "../README.md"
-# TODO requires_python
+requires-python = ">=3.9"
 version = "1.0.0" # TODO eventually use dynamic versioning
@@ -41,19 +42,14 @@ doc = [
     "pygments-csv-lexer>=0.1.3",
     "sphinx>=7.2.6",
     "sphinx-argparse>=0.4.0",
-    "sphinx-autoapi>=3.0.0",
+    "sphinx-autoapi==3.0.0",
     "sphinx-copybutton>=0.5.2",
     "sphinx-jsonschema>=1.19.1",
     "sphinx-togglebutton>=0.3.2",
     "mdit-py-plugins>=0.4.0",
     "myst-parser>=2.0.0",
 ]
-test = [
-    "pytest>=6.0.0",
-    "pytest-cov",
-    "pytest-mock",
-    "fids>=0.1.0",
-]
+test = ["pytest>=6.0.0", "pytest-cov", "pytest-mock", "fids>=0.1.0"]
 tests = ["nipoppy[test]"] # alias in case of typo

 [project.scripts]
diff --git a/nipoppy_cli/tests/conftest.py b/nipoppy_cli/tests/conftest.py
index 58c0f4e2..578bc9dc 100644
--- a/nipoppy_cli/tests/conftest.py
+++ b/nipoppy_cli/tests/conftest.py
@@ -1,5 +1,7 @@
 """Utilities for tests."""

+from __future__ import annotations
+
 import datetime
 from pathlib import Path
 from typing import Optional
@@ -13,7 +15,7 @@
 from nipoppy.config.main import Config
 from nipoppy.tabular.doughnut import Doughnut
 from nipoppy.tabular.manifest import Manifest
-from nipoppy.utils import strip_session
+from nipoppy.utils import StrOrPathLike, strip_session

 FPATH_CONFIG = "proc/global_configs.json"
 FPATH_MANIFEST = "tabular/manifest.csv"
@@ -125,7 +127,7 @@ def _process_participants_sessions(

 def _fake_dicoms(
-    dpath: str | Path,
+    dpath: StrOrPathLike,
     participants_and_sessions: Optional[dict[str, list[str]]] = None,
     participants: Optional[list[str]] = None,
     sessions: Optional[list[str]] = None,
@@ -189,7 +191,7 @@ def _fake_dicoms(

 def fake_dicoms_downloaded(
-    dpath: str | Path,
+    dpath: StrOrPathLike,
     participants_and_sessions: Optional[dict[str, list[str]]] = None,
     participants: Optional[list[str]] = None,
     sessions: Optional[list[str]] = None,
@@ -220,7 +222,7 @@ def fake_dicoms_downloaded(

 def fake_dicoms_organized(
-    dpath: str | Path,
+    dpath: StrOrPathLike,
     participants_and_sessions: Optional[dict[str, list[str]]] = None,
     participants: Optional[list[str]] = None,
     sessions: Optional[list[str]] = None,
@@ -253,9 +255,9 @@ def prepare_dataset(
     participants_and_sessions_downloaded: Optional[dict[str, list[str]]] = None,
     participants_and_sessions_organized: Optional[dict[str, list[str]]] = None,
     participants_and_sessions_bidsified: Optional[dict[str, list[str]]] = None,
-    dpath_downloaded: Optional[str | Path] = None,
-    dpath_organized: Optional[str | Path] = None,
-    dpath_bidsified: Optional[str | Path] = None,
+    dpath_downloaded: Optional[StrOrPathLike] = None,
+    dpath_organized: Optional[StrOrPathLike] = None,
+    dpath_bidsified: Optional[StrOrPathLike] = None,
 ):
     """Create dummy imaging files for testing the DICOM-to-BIDS conversion process."""
     # create the manifest
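The new requires-python floor and the typing-extensions runtime dependency go together: typing.Self only landed in Python 3.11. nipoppy imports it unconditionally from typing_extensions; the version-gated import below is the equivalent spelling, shown only for context.

import sys

if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self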
diff --git a/nipoppy_cli/tests/test_tabular_base.py b/nipoppy_cli/tests/test_tabular_base.py
index 896cf258..25eff38a 100644
--- a/nipoppy_cli/tests/test_tabular_base.py
+++ b/nipoppy_cli/tests/test_tabular_base.py
@@ -189,7 +189,7 @@ def test_concatenate_error(data1: list[dict], data2: list[dict]):
 )
 def test_save_with_backup(
     fname: str,
-    dname_backups: str | None,
+    dname_backups: Optional[str],
     dname_backups_processed: str,
     tmp_path: Path,
 ):
diff --git a/nipoppy_cli/tests/test_tabular_doughnut.py b/nipoppy_cli/tests/test_tabular_doughnut.py
index 1724f14e..95c36dfd 100644
--- a/nipoppy_cli/tests/test_tabular_doughnut.py
+++ b/nipoppy_cli/tests/test_tabular_doughnut.py
@@ -6,6 +6,7 @@
 import pytest

 from nipoppy.tabular.doughnut import Doughnut, generate_doughnut, update_doughnut
+from nipoppy.utils import StrOrPathLike

 from .conftest import DPATH_TEST_DATA, check_doughnut, prepare_dataset
@@ -176,9 +177,9 @@ def test_generate_and_update(
     participants_and_sessions_downloaded: dict[str, list[str]],
     participants_and_sessions_organized: dict[str, list[str]],
     participants_and_sessions_bidsified: dict[str, list[str]],
-    dpath_downloaded_relative: str | Path,
-    dpath_organized_relative: str | Path,
-    dpath_bidsified_relative: str | Path,
+    dpath_downloaded_relative: StrOrPathLike,
+    dpath_organized_relative: StrOrPathLike,
+    dpath_bidsified_relative: StrOrPathLike,
     empty: bool,
     str_paths: bool,
     tmp_path: Path,
diff --git a/nipoppy_cli/tests/test_utils.py b/nipoppy_cli/tests/test_utils.py
index dc9f16a5..8ed7c8c7 100644
--- a/nipoppy_cli/tests/test_utils.py
+++ b/nipoppy_cli/tests/test_utils.py
@@ -3,6 +3,7 @@
 import json
 import re
 from pathlib import Path
+from typing import Optional

 import pandas as pd
 import pytest
@@ -194,7 +195,7 @@ def test_add_path_timestamp(timestamp_format, expected, datetime_fixture):  # no
 )
 def test_save_df_with_backup(
     fname: str,
-    dname_backups: str | None,
+    dname_backups: Optional[str],
     dname_backups_processed: str,
     tmp_path: Path,
 ):
diff --git a/nipoppy_cli/tests/test_workflow_pipeline.py b/nipoppy_cli/tests/test_workflow_pipeline.py
index 7eb3551d..e0e7c4e5 100644
--- a/nipoppy_cli/tests/test_workflow_pipeline.py
+++ b/nipoppy_cli/tests/test_workflow_pipeline.py
@@ -10,7 +10,7 @@
 from nipoppy.config.boutiques import BoutiquesConfig
 from nipoppy.config.main import PipelineConfig
-from nipoppy.utils import strip_session
+from nipoppy.utils import StrOrPathLike, strip_session
 from nipoppy.workflows.pipeline import BasePipelineWorkflow

 from .conftest import datetime_fixture  # noqa F401
@@ -20,12 +20,12 @@ class PipelineWorkflow(BasePipelineWorkflow):

     def __init__(
         self,
-        dpath_root: Path | str,
+        dpath_root: StrOrPathLike,
         pipeline_name: str,
         pipeline_version: str,
         participant: str = None,
         session: str = None,
-        fpath_layout: Optional[Path] = None,
+        fpath_layout: Optional[StrOrPathLike] = None,
         logger: Optional[logging.Logger] = None,
         dry_run: bool = False,
     ):
@@ -105,7 +105,7 @@ def workflow(tmp_path: Path):
     )


-def _make_dummy_json(fpath: str | Path):
+def _make_dummy_json(fpath: StrOrPathLike):
     fpath = Path(fpath)
     fpath.parent.mkdir(parents=True, exist_ok=True)
     fpath.write_text("{}\n")
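A note on the test signature changes above: unlike the library modules, these test files are not shown enabling postponed annotation evaluation, so an eagerly evaluated `str | None` would raise TypeError at import time on Python 3.9. The sketch below uses an illustrative function name, not one of the real tests.

from typing import Optional


def save_with_backup_signature_demo(dname_backups: Optional[str]) -> None:
    """Stand-in showing the 3.9-safe spelling used by the updated tests."""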