From a1115543a383e823b5885314255414c84a18ca2f Mon Sep 17 00:00:00 2001
From: Remi Gau
Date: Thu, 25 Apr 2024 17:17:38 +0200
Subject: [PATCH] [MAINT] Expand testing and add coverage (#144)

* add tests for mriqc and tractoflow
* add expected string
* add expected string
* blacify
* lint
* rm tractoflow
* fix fmriprep tests
* fix mriqc tests
* start fixing tests for tractoflow
* update flake8 configf
* reset some linted files to master
* reset some linted files to master
* refix
* rm test tractoflow
* reset tractoflow to main
* fix MRIQC test

---------

Co-authored-by: Michelle Wang
---
 .flake8                                       | 14 +++
 .github/workflows/tests.yml                   | 24 ++++-
 .gitignore                                    |  6 +-
 .pre-commit-config.yaml                       | 13 +++
 .../proc_pipe/fmriprep/run_fmriprep.py        | 18 ++--
 nipoppy/workflow/proc_pipe/mriqc/run_mriqc.py |  2 +
 .../workflow/proc_pipe/tractoflow/__init__.py |  0
 pyproject.toml                                | 17 +++-
 tests/Makefile                                |  7 ++
 tests/conftest.py                             | 39 +++++++-
 tests/data/.gitignore                         |  1 +
 tests/data/test_global_configs.json           | 11 ++-
 tests/test_workflow_fmriprep.py               | 96 +++++++++----------
 tests/test_workflow_mriqc.py                  | 92 ++++++++++++++++++
 14 files changed, 265 insertions(+), 75 deletions(-)
 create mode 100644 .flake8
 rename .gitmodules => nipoppy/workflow/proc_pipe/tractoflow/__init__.py (100%)
 create mode 100644 tests/Makefile
 create mode 100644 tests/data/.gitignore
 create mode 100644 tests/test_workflow_mriqc.py

diff --git a/.flake8 b/.flake8
new file mode 100644
index 00000000..ef0865e8
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,14 @@
+[flake8]
+exclude =
+    .git,
+    __pycache__,
+    build,
+    dist,
+    env,
+    venv,
+per-file-ignores =
+    # - docstrings rules that should not be applied to tests
+    tests/*: D100, D103, D104
+    # allow "weird indentation"
+    tests/test_workflow_*.py: D100, D103, D104, E131, E127, E501
+max-line-length = 90
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 1908ff9b..c5dbe1c3 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -7,7 +7,7 @@ on:
   pull_request:
     branches: ['*']

-# cancel previous runs if new one is triggered 
+# cancel previous runs if new one is triggered
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest

     # only trigger on upstream repo
-    if: github.repository_owner == 'neurodatascience' && github.event.repository.name == 'nipoppy' 
+    if: github.repository_owner == 'neurodatascience' && github.event.repository.name == 'nipoppy'

     steps:

@@ -31,11 +31,25 @@
       with:
         python-version: '3.11'

-    - name: Install
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y git-annex
+
+    - name: Install nipoppy
       run: |
         pip install -U pip
         pip install .[tests]

+    - name: Install data
+      run: make -C tests data/ds004097
+
     - name: Test
-      run: |
-        python -m pytest
\ No newline at end of file
+      run: python -m pytest tests --cov-report=xml
+
+    - name: Upload coverage to Codecov
+      uses: codecov/codecov-action@v3
+      with:
+        file: ./coverage.xml
+        name: codecov-umbrella
+        fail_ci_if_error: false
diff --git a/.gitignore b/.gitignore
index f711afe9..e4ca476c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,7 +17,7 @@ share/python-wheels/
 nipoppy/notebooks/.ipynb_checkpoints/*

 # dataset-specific files
-bids_filter.json 
+bids_filter.json
 workflow/proc_pipe/fmriprep/scripts/fmriprep_sge.sh
 workflow/proc_pipe/fmriprep/scripts/fmriprep_slurm.sh
 workflow/dicom_org/dicom_dir_func.py
@@ -25,4 +25,8 @@ workflow/dicom_org/dicom_dir_func.py
 # logs/testing
 *.out
 *.log
+.coverage
+htmlcov
+
+
 env/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f0877bab..39a9029b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,6 +3,19 @@
 # See https://pre-commit.com/hooks.html for more hooks

 repos:
+
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.4.0
+  hooks:
+  - id: check-ast
+  - id: check-case-conflict
+  - id: check-json
+  - id: check-merge-conflict
+  - id: check-yaml
+  - id: end-of-file-fixer
+  - id: trailing-whitespace
+  - id: check-toml
+
 - repo: https://github.com/codespell-project/codespell
   rev: v2.2.5
   hooks:
diff --git a/nipoppy/workflow/proc_pipe/fmriprep/run_fmriprep.py b/nipoppy/workflow/proc_pipe/fmriprep/run_fmriprep.py
index 833094aa..12733bf0 100755
--- a/nipoppy/workflow/proc_pipe/fmriprep/run_fmriprep.py
+++ b/nipoppy/workflow/proc_pipe/fmriprep/run_fmriprep.py
@@ -42,7 +42,7 @@ def run_fmriprep(participant_id: str,
     fmriprep_home_dir = f"{fmriprep_out_dir}/fmriprep_home_{participant_id}/"
     Path(f"{fmriprep_home_dir}").mkdir(parents=True, exist_ok=True)

-    bids_db_dir = f"/fmripre_proc/{DNAME_BIDS_DB}"
+    bids_db_dir = f"/fmriprep_proc/{DNAME_BIDS_DB}"

     bids_db_dir_outside_container = f"{proc_dir}/{DNAME_BIDS_DB}"
     if not Path(bids_db_dir_outside_container).exists():
@@ -60,12 +60,12 @@ def run_fmriprep(participant_id: str,
             logger.error(f"Expected to find only files in {bids_db_dir_outside_container} but found directory {path}")
             sys.exit(1)

-    # Singularity CMD 
+    # Singularity CMD
     SINGULARITY_CMD=f"singularity run \
         -B {bids_dir}:{bids_dir} \
         -B {fmriprep_home_dir}:/home/fmriprep --home /home/fmriprep --cleanenv \
         -B {fmriprep_out_dir}:/output \
-        -B {proc_dir}:/fmripre_proc \
+        -B {proc_dir}:/fmriprep_proc \
         -B {templateflow_dir}:{SINGULARITY_TEMPLATEFLOW_DIR} \
         -B {fmriprep_dir}:/work \
         -B {fs_dir}:{SINGULARITY_FS_DIR} \
@@ -89,7 +89,7 @@ def run_fmriprep(participant_id: str,
     # Append optional args
     if use_bids_filter:
         logger.info("Using bids_filter.json")
-        bids_filter_str = f"--bids-filter-file /fmripre_proc/bids_filter_fmriprep.json"
+        bids_filter_str = f"--bids-filter-file /fmriprep_proc/bids_filter_fmriprep.json"
         fmriprep_CMD = f"{fmriprep_CMD} {bids_filter_str}"

     if anat_only:
@@ -97,7 +97,7 @@ def run_fmriprep(participant_id: str,
         anat_only_str = "--anat-only"
         fmriprep_CMD = f"{fmriprep_CMD} {anat_only_str}"

-    CMD_ARGS = SINGULARITY_CMD + fmriprep_CMD 
+    CMD_ARGS = SINGULARITY_CMD + fmriprep_CMD
     CMD = CMD_ARGS.split()

     logger.info("Running fmriprep...")
@@ -108,7 +108,7 @@ def run_fmriprep(participant_id: str,
         fmriprep_proc = subprocess.run(CMD)
     except Exception as e:
         logger.error(f"fmriprep run failed with exceptions: {e}")
-    
+
     logger.info(f"Successfully completed fmriprep run for participant: {participant_id}")
     logger.info("-"*75)
     logger.info("")
@@ -164,7 +164,7 @@ def run(participant_id: str,
     # Copy bids_filter.json `<DATASET_ROOT>/bids/bids_filter.json`
     if use_bids_filter:
         logger.info(f"Copying ./bids_filter.json to {proc_dir}/bids_filter_fmriprep.json (to be seen by Singularity container)")
-        shutil.copyfile(f"{CWD}/bids_filter.json", f"{proc_dir}/bids_filter_fmriprep.json")
+        shutil.copyfile(f"{CWD}/sample_bids_filter.json", f"{proc_dir}/bids_filter_fmriprep.json")

     # launch fmriprep
     run_fmriprep(participant_id,
@@ -183,7 +183,7 @@ def run(participant_id: str,
 if __name__ == '__main__':
     # argparse
     HELPTEXT = """
-    Script to run fMRIPrep 
+    Script to run fMRIPrep
     """

     parser = argparse.ArgumentParser(description=HELPTEXT)
@@ -191,7 +191,7 @@ def run(participant_id: str,
     parser.add_argument('--global_config', type=str, help='path to global configs for a given nipoppy dataset')
     parser.add_argument('--participant_id', type=str, help='participant id')
     parser.add_argument('--session_id', type=str, help='session id for the participant')
-    parser.add_argument('--output_dir', type=str, default=None, 
+    parser.add_argument('--output_dir', type=str, default=None,
                         help='specify custom output dir (if None --> <DATASET_ROOT>/derivatives)')
     parser.add_argument('--use_bids_filter', action='store_true', help='use bids filter or not')
     parser.add_argument('--anat_only', action='store_true', help='run only anatomical workflow or not')
diff --git a/nipoppy/workflow/proc_pipe/mriqc/run_mriqc.py b/nipoppy/workflow/proc_pipe/mriqc/run_mriqc.py
index c5e2d09d..e96e4662 100755
--- a/nipoppy/workflow/proc_pipe/mriqc/run_mriqc.py
+++ b/nipoppy/workflow/proc_pipe/mriqc/run_mriqc.py
@@ -106,6 +106,8 @@ def run(participant_id, global_configs, session_id, output_dir, modalities, logg
     except Exception as e:
         logger.error(f"mriqc run failed with exceptions: {e}")

+    return CMD
+

 if __name__ == '__main__':

diff --git a/.gitmodules b/nipoppy/workflow/proc_pipe/tractoflow/__init__.py
similarity index 100%
rename from .gitmodules
rename to nipoppy/workflow/proc_pipe/tractoflow/__init__.py
diff --git a/pyproject.toml b/pyproject.toml
index fe33da62..14cf2231 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,7 +19,7 @@ dependencies = [
 ]

 [project.optional-dependencies]
-test = ["pytest"]
+test = ["pytest", "pytest-cov", "datalad"]
 # alias in case of typo
 tests = ["nipoppy[test]"]

@@ -30,6 +30,7 @@ packages = ["nipoppy"]
 [tool.hatch.build]
 include = [
     "nipoppy/workflow/proc_pipe/tractoflow/tractoflow",
+    "nipoppy/workflow/proc_pipe/tractoflow",
 ]

 [tool.codespell]
@@ -37,5 +38,15 @@ skip = '.git,.github,*.pdf,*.svg,pyproject.toml,*.ipynb,*.html,ppmi_imaging_desc
 ignore-words-list = 'te,ines'

 [tool.pytest.ini_options]
-addopts = "-ra -q -vv "
-testpaths = ["tests/"]
\ No newline at end of file
+addopts = "-ra -q -vv --cov nipoppy"
+testpaths = ["tests/"]
+norecursedirs = ["tests/data"]
+
+[tool.isort]
+combine_as_imports = true
+line_length = 79
+profile = "black"
+skip_gitignore = true
+
+[tool.black]
+line-length = 79
diff --git a/tests/Makefile b/tests/Makefile
new file mode 100644
index 00000000..02fa4d87
--- /dev/null
+++ b/tests/Makefile
@@ -0,0 +1,7 @@
+.PHONY: data/ds004097
+
+data/ds004097:
+	rm -fr data/ds004097
+	datalad install -s ///openneuro/ds004097 data/ds004097
+	cd data/ds004097 && datalad get sub-NDARDD890AYU/ses-01/anat/ -J 12
+	cd data/ds004097 && datalad get sub-NDARDD890AYU/ses-01/dwi/ -J 12
diff --git a/tests/conftest.py b/tests/conftest.py
index 074a6dd8..f27cc9ef 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,16 +1,17 @@
 from __future__ import annotations

 import json
+import shutil
 from pathlib import Path


-def global_config_file() -> Path:
+def _global_config_file() -> Path:
     return Path(__file__).parent / "data" / "test_global_configs.json"


 def global_config_for_testing(pth: Path) -> dict:
     """Set up configuration for testing and create required directories."""
-    with open(global_config_file(), "r") as f:
+    with open(_global_config_file(), "r") as f:
         global_configs = json.load(f)

     global_configs["DATASET_ROOT"] = str(pth)
@@ -18,3 +19,37 @@ def global_config_for_testing(pth: Path) -> dict:
     (pth / "bids").mkdir(parents=True, exist_ok=True)

     return global_configs
+
+
+def create_dummy_bids_filter(
+    pth: Path, filename: str = "bids_filter.json"
+) -> None:
+    """Use a modified content from the tractoflow sample."""
+    bids_filter = {
+        "t1w": {
+            "datatype": "anat",
+            "session": "01",
+            "run": "1",
+            "suffix": "T1w",
+        },
+        "dwi": {"session": "01", "run": "1", "suffix": "dwi"},
+    }
+
+    pth.mkdir(parents=True, exist_ok=True)
+    with open(pth / filename, "w") as f:
+        json.dump(bids_filter, f)
+
+
+def mock_bids_dataset(pth: Path, dataset: str) -> Path:
+    """Copy a BIDS example dataset to the given path."""
+    ds = Path(__file__).parent / "data" / dataset
+    print(f"\nCopying {ds} to {pth}\n")
+    shutil.copytree(
+        ds,
+        pth,
+        symlinks=True,
+        ignore_dangling_symlinks=True,
+        dirs_exist_ok=True,
+    )
+
+    return pth / "bids"
diff --git a/tests/data/.gitignore b/tests/data/.gitignore
new file mode 100644
index 00000000..7bf5c467
--- /dev/null
+++ b/tests/data/.gitignore
@@ -0,0 +1 @@
+ds**
diff --git a/tests/data/test_global_configs.json b/tests/data/test_global_configs.json
index 73d4120e..0085ab0e 100644
--- a/tests/data/test_global_configs.json
+++ b/tests/data/test_global_configs.json
@@ -3,7 +3,7 @@
     "DATASET_ROOT": "",

     "CONTAINER_STORE": "",
-    
+
     "SINGULARITY_PATH": "singularity",

     "TEMPLATEFLOW_DIR": "",
@@ -13,7 +13,7 @@

     "BIDS": {
        "heudiconv": {
-            "VERSION": "0.11.6", 
+            "VERSION": "0.11.6",
            "CONTAINER": "heudiconv_{}.sif",
            "URL": ""
        },
@@ -35,6 +35,11 @@
            "CONTAINER": "fmriprep_{}.sif",
            "URL": ""
        },
+        "tractoflow": {
+            "VERSION": "XX.X.X",
+            "CONTAINER": "tractoflow_{}.sif",
+            "URL": ""
+        },
        "freesurfer": {
            "VERSION": "6.0.1",
            "CONTAINER": "fmriprep_{}.sif",
@@ -47,6 +52,6 @@
            "PATH": "",
            "VERSION": "",
            "URL": ""
-        } 
+        }
    }
 }
diff --git a/tests/test_workflow_fmriprep.py b/tests/test_workflow_fmriprep.py
index d4fa52cc..5cb6693a 100644
--- a/tests/test_workflow_fmriprep.py
+++ b/tests/test_workflow_fmriprep.py
@@ -1,46 +1,35 @@
 from __future__ import annotations

-import json
+import logging
 from pathlib import Path

 import pytest

-from .conftest import global_config_for_testing
 import nipoppy.workflow.logger as my_logger
-import nipoppy.workflow.proc_pipe.fmriprep.run_fmriprep as fmriprep_module
 from nipoppy.workflow.proc_pipe.fmriprep.run_fmriprep import run, run_fmriprep

+from .conftest import create_dummy_bids_filter, global_config_for_testing

-def create_dummy_fs_license(pth: Path) -> None:
+
+def _create_dummy_fs_license(pth: Path) -> None:
     pth = pth / "freesurfer"
     pth.mkdir(parents=True, exist_ok=True)
     license_file = pth / "license.txt"
     license_file.write_text("dummy content")


-def dummy_bids_filter_file() -> Path:
-    """TODO probably don't want the bids filter file to be in the module directory"""
-    pth = Path(fmriprep_module.__file__).parent
-    return pth / "bids_filter.json"
-
-
-def create_dummy_bids_filter() -> None:
-    with open(dummy_bids_filter_file(), "w") as f:
-        json.dump({"dummy": "dummy"}, f)
-
-
-def delete_dummy_bids_filter() -> None:
-    dummy_bids_filter_file().unlink(missing_ok=True)
-
-
 @pytest.mark.parametrize("use_bids_filter", [True, False])
 @pytest.mark.parametrize("anat_only", [True, False])
-def test_run(tmp_path, use_bids_filter, anat_only):
-    create_dummy_fs_license(tmp_path)
+def test_run(caplog, tmp_path, use_bids_filter, anat_only):
+    """Smoke test."""
+    caplog.set_level(logging.CRITICAL)
+
+    _create_dummy_fs_license(tmp_path)
+
+    (tmp_path / "proc").mkdir(exist_ok=True)

-    # TODO because this is set up and torn down after each test
-    # we probably want to turn this into fixture
-    create_dummy_bids_filter()
+    if use_bids_filter:
+        create_dummy_bids_filter(tmp_path)

     participant_id = "01"

@@ -59,16 +48,16 @@
         logger=logger,
     )

-    delete_dummy_bids_filter()
-

 @pytest.mark.parametrize("use_bids_filter", [True, False])
 @pytest.mark.parametrize("anat_only", [True, False])
-def test_run_fmriprep(tmp_path, use_bids_filter, anat_only):
+def test_run_fmriprep(caplog, tmp_path, use_bids_filter, anat_only):
+    caplog.set_level(logging.CRITICAL)
+
     log_file = tmp_path / "fmriprep.log"

     bids_dir = "bids_dir"
-    proc_dir = "fmripre_proc"
+    proc_dir = "fmriprep_proc"
     fmriprep_dir = tmp_path / "fmriprep_dir"
     fs_dir = "fs_dir"
     templateflow_dir = "templateflow_dir"
@@ -89,35 +78,38 @@
     )
     # fmt: off
     expected_cmd = [
-        'singularity', 'run', 
+        'singularity', 'run',
         '-B', f'{bids_dir}:{bids_dir}',
-        '-B', f'{proc_dir}:/fmriprep_proc',
-        '-B', f'{fmriprep_dir}/output/fmriprep_home_{participant_id}/:/home/fmriprep',
-        '--home', '/home/fmriprep',
-        '--cleanenv',
-        '-B', f'{fmriprep_dir}/output/:/output',
-        '-B', f'{templateflow_dir}:/templateflow',
-        '-B', f'{fmriprep_dir}:/work',
-        '-B', f'{fs_dir}:/fsdir/',
-        singularity_container, '/data_dir', '/output', 'participant',
-        '--participant-label', participant_id,
-        '-w', '/work',
-        '--output-spaces', 'MNI152NLin2009cAsym:res-2', 'anat', 'fsnative',
-        '--fs-subjects-dir', '/fsdir/',
-        '--skip_bids_validation',
-        '--bids-database-dir', '/fmripre_proc/bids_db_fmriprep',
-        '--fs-license-file', '/fsdir/license.txt',
-        '--return-all-components',
-        '-v',
-        '--write-graph',
-        '--notrack',
-        '--omp-nthreads', '4',
-        '--nthreads', '8',
+        '-B', f'{fmriprep_dir}/output//fmriprep_home_{participant_id}/:/home/fmriprep',
+        '--home', '/home/fmriprep',
+        '--cleanenv',
+        '-B', f'{fmriprep_dir}/output/:/output',
+        '-B', 'fmriprep_proc:/fmriprep_proc',
+        '-B', f'{templateflow_dir}:/templateflow',
+        '-B', f'{fmriprep_dir}:/work',
+        '-B', f'{fs_dir}:/fsdir/',
+        singularity_container, 'bids_dir', '/output', 'participant',
+        '--participant-label', participant_id,
+        '-w', '/work',
+        '--output-spaces', 'MNI152NLin2009cAsym:res-2', 'anat', 'fsnative',
+        '--fs-subjects-dir', '/fsdir/',
+        '--skip_bids_validation',
+        '--bids-database-dir', '/fmriprep_proc/bids_db_fmriprep',
+        '--fs-license-file', '/fsdir/license.txt',
+        '--return-all-components',
+        '-v',
+        '--write-graph',
+        '--notrack',
+        '--omp-nthreads', '4',
+        '--nthreads', '8',
         '--mem_mb', '4000']
     # fmt: on

     if use_bids_filter:
-        expected_cmd += ["--bids-filter-file", "/fmripre_proc/bids_filter_fmriprep.json"]
+        expected_cmd += [
+            "--bids-filter-file",
+            "/fmriprep_proc/bids_filter_fmriprep.json",
+        ]

     if anat_only:
         expected_cmd += ["--anat-only"]
diff --git a/tests/test_workflow_mriqc.py b/tests/test_workflow_mriqc.py
new file mode 100644
index 00000000..8d164c06
--- /dev/null
+++ b/tests/test_workflow_mriqc.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+import pytest
+
+import nipoppy.workflow.logger as my_logger
+from nipoppy.workflow.proc_pipe.mriqc.run_mriqc import run
+
+from .conftest import global_config_for_testing
+
+
+@pytest.mark.parametrize("logger", ["mriqc.log", None])
+@pytest.mark.parametrize("output_dir", ["tmp_path", None])
+@pytest.mark.parametrize("modalities", [["anat"], ["anat", "func"]])
+def test_run(caplog, tmp_path, output_dir, modalities, logger):
+    """Check that the proper command is generated."""
+    caplog.set_level(logging.CRITICAL)
+
+    participant_id = "01"
+
+    session_id = "01"
+
+    if logger is not None:
+        log_file = tmp_path / logger
+        logger = my_logger.get_logger(log_file)
+    else:
+        (tmp_path / "scratch" / "logs").mkdir(parents=True, exist_ok=True)
+
+    global_configs = global_config_for_testing(tmp_path)
+    if output_dir == "tmp_path":
+        output_dir = tmp_path
+    expected_output_dir = output_dir
+    if output_dir is None:
+        expected_output_dir = (
+            Path(global_configs["DATASET_ROOT"]) / "derivatives"
+        )
+
+    cmd = run(
+        participant_id=participant_id,
+        global_configs=global_config_for_testing(tmp_path),
+        session_id=session_id,
+        output_dir=output_dir,
+        modalities=modalities,
+        logger=logger,
+    )
+
+    # fmt: off
+    expected_cmd = ['singularity', 'run',
+                    '-B', f'{str(tmp_path)}/bids/:{str(tmp_path)}/bids/:ro',
+                    '-B', f'{str(tmp_path)}/proc/:/mriqc_proc',
+                    '-B', f'{str(expected_output_dir)}/mriqc/23.1.0/output/:/out',
+                    '-B', f'{str(expected_output_dir)}/mriqc/23.1.0/work/:/work',
+                    '-B', ':/templateflow',
+                    '--cleanenv',
+                    'mriqc_23.1.0.sif', f'{str(tmp_path)}/bids/', '/out', 'participant',
+                    '--participant-label', participant_id,
+                    '--session-id', session_id,
+                    '--modalities']
+    expected_cmd.extend(modalities)
+    expected_cmd.extend(['--no-sub',
+                         '--work-dir', '/work',
+                         '--bids-database-dir', '/mriqc_proc/bids_db_mriqc'])
+    # fmt: on
+
+    assert cmd == expected_cmd
+
+
+def test_logger_error(tmp_path, caplog):
+    """Check that a logging error is raised."""
+    participant_id = "01"
+    session_id = "01"
+    output_dir = tmp_path
+    modalities = ["anat", "func"]
+    log_file = tmp_path / "mriqc.log"
+    logger = my_logger.get_logger(log_file)
+
+    caplog.set_level(logging.ERROR)
+
+    run(
+        participant_id=participant_id,
+        global_configs=global_config_for_testing(tmp_path),
+        session_id=session_id,
+        output_dir=output_dir,
+        modalities=modalities,
+        logger=logger,
+    )
+
+    for record in caplog.records:
+        if record.levelname == "ERROR":
+            assert "mriqc run failed" in caplog.text