From 5c649f56ce32aa17e93242a1307c93e60d2eab4a Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Fri, 13 Jan 2023 14:08:53 -0500 Subject: [PATCH 01/82] Update version. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 97a6ea4e..8059c201 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup, find_packages setup(name='clpipe', - version='1.7.2', + version='1.7.3', description='clpipe: MRI processing pipeline for high performance clusters', url='https://github.com/cohenlabUNC/clpipe', author='Maintainer: Teague Henry, Maintainer: Will Asciutto, Contributor: Deepak Melwani', From f130abebf321763fe0343427d8966d430b185bcb Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Mon, 23 Jan 2023 16:02:57 -0500 Subject: [PATCH 02/82] added line to change perms --- clpipe/project_setup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index d0320424..9f265738 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -1,4 +1,4 @@ -import os +import os, stat import click from .config_json_parser import ClpipeConfigParser from pkg_resources import resource_stream @@ -19,7 +19,8 @@ def project_setup(project_title=None, project_dir=None, config_parser = ClpipeConfigParser() org_source = os.path.abspath(source_data) - add_file_handler(os.path.join(project_dir, "logs")) + add_file_handler(os.path.join(project_dir, "logs")) #This line created the clpipe.log file + os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXO) logger = get_logger(STEP_NAME, debug=debug) org_source = os.path.abspath(source_data) From 53614a5983f447b6c257a048d8773b49d5311dc3 Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Mon, 23 Jan 2023 16:03:55 -0500 Subject: [PATCH 03/82] added line to change perms for log_file --- clpipe/project_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index 9f265738..33bb8b4e 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -19,7 +19,7 @@ def project_setup(project_title=None, project_dir=None, config_parser = ClpipeConfigParser() org_source = os.path.abspath(source_data) - add_file_handler(os.path.join(project_dir, "logs")) #This line created the clpipe.log file + add_file_handler(os.path.join(project_dir, "logs")) os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXO) logger = get_logger(STEP_NAME, debug=debug) From 4062098685fe061df89a25dd24117e931781a420 Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Thu, 26 Jan 2023 12:38:08 -0500 Subject: [PATCH 04/82] changed perms to group --- clpipe/project_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index 33bb8b4e..8afdf589 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -20,7 +20,7 @@ def project_setup(project_title=None, project_dir=None, org_source = os.path.abspath(source_data) add_file_handler(os.path.join(project_dir, "logs")) - os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXO) + os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXG) logger = get_logger(STEP_NAME, debug=debug) org_source = os.path.abspath(source_data) From 71a7e28c3d792647db9819e0d263dbb2e59d1ed1 Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Thu, 26 Jan 2023 12:44:29 -0500 Subject: 
[PATCH 05/82] added code for templateFlowToggle empty --- clpipe/fmri_preprocess.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/fmri_preprocess.py b/clpipe/fmri_preprocess.py index 34b2b2bf..64fdbbcd 100644 --- a/clpipe/fmri_preprocess.py +++ b/clpipe/fmri_preprocess.py @@ -80,7 +80,7 @@ def fmriprep_process(bids_dir=None, working_dir=None, output_dir=None, template_1 = "" template_2 = "" - if config['FMRIPrepOptions']['TemplateFlowToggle']: + if config['FMRIPrepOptions']['TemplateFlowToggle'] != "": logger.debug("Template Flow toggle: ON") logger.debug(f"Template Flow path: {template_flow_path}") template_1 = TEMPLATE_1.format( From f8b097c105c68538856f454a148c446817ab6ec9 Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Tue, 31 Jan 2023 11:05:41 -0500 Subject: [PATCH 06/82] added username to logs and added guard for write perms --- clpipe/project_setup.py | 6 +++++- clpipe/utils.py | 23 +++++++++++++++-------- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index 8afdf589..259af544 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -3,6 +3,7 @@ from .config_json_parser import ClpipeConfigParser from pkg_resources import resource_stream import json +import logging from .config import DEFAULT_CONFIG_PATH, DEFAULT_CONFIG_FILE_NAME from .utils import get_logger, add_file_handler @@ -20,8 +21,11 @@ def project_setup(project_title=None, project_dir=None, org_source = os.path.abspath(source_data) add_file_handler(os.path.join(project_dir, "logs")) - os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXG) + os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXU) logger = get_logger(STEP_NAME, debug=debug) + #ADD THESE LINES TO GET USERNAME IN EVERY LOG STATEMENT + extra = {"username": str(os.getlogin())} + logger = logging.LoggerAdapter(logger, extra) org_source = os.path.abspath(source_data) if move_source_data or symlink_source_data: diff --git a/clpipe/utils.py b/clpipe/utils.py index 78a0008b..fb80daf6 100644 --- a/clpipe/utils.py +++ b/clpipe/utils.py @@ -85,12 +85,19 @@ def add_file_handler(log_dir: os.PathLike, f_name: str="clpipe.log", # Create log handler logger.debug(f"Using log file: {log_file}") - f_handler = logging.FileHandler(log_file) - f_handler.setLevel(logging.DEBUG) - - # Create log format - f_format = logging.Formatter('%(asctime)s - %(levelname)s: %(name)s - %(message)s') - f_handler.setFormatter(f_format) - # Add handler to the logger - logger.addHandler(f_handler) + if((not log_file.is_file()) or (log_file.is_file() and os.access(log_file, os.W_OK))): + f_handler = logging.FileHandler(log_file) + f_handler.setLevel(logging.DEBUG) + + # Create log format + f_format = logging.Formatter('%(asctime)s - %(username)s - %(levelname)s: %(name)s - %(message)s') + f_handler.setFormatter(f_format) + + # Add handler to the logger + logger.addHandler(f_handler) + return + else: + logger.error(f"User does not have write permissions for the Log File at: {log_file}") + exit(1) + From d6e4555782311721201a1448d30eaf0f832ec20a Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Tue, 31 Jan 2023 11:25:54 -0500 Subject: [PATCH 07/82] improved code for custom logger --- clpipe/project_setup.py | 5 +---- clpipe/utils.py | 3 ++- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index 259af544..e45dd597 100644 --- a/clpipe/project_setup.py +++ 
b/clpipe/project_setup.py @@ -21,11 +21,8 @@ def project_setup(project_title=None, project_dir=None, org_source = os.path.abspath(source_data) add_file_handler(os.path.join(project_dir, "logs")) - os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXU) + # os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXU) logger = get_logger(STEP_NAME, debug=debug) - #ADD THESE LINES TO GET USERNAME IN EVERY LOG STATEMENT - extra = {"username": str(os.getlogin())} - logger = logging.LoggerAdapter(logger, extra) org_source = os.path.abspath(source_data) if move_source_data or symlink_source_data: diff --git a/clpipe/utils.py b/clpipe/utils.py index fb80daf6..e40f6175 100644 --- a/clpipe/utils.py +++ b/clpipe/utils.py @@ -68,9 +68,10 @@ def get_logger(name, debug=False, log_dir=None, f_name="clpipe.log"): if log_dir: add_file_handler(log_dir, f_name, logger=logger) + uName = {"username": os.getlogin()} + logger = logging.LoggerAdapter(logger, uName) return logger - def add_file_handler(log_dir: os.PathLike, f_name: str="clpipe.log", logger: logging.Logger = None): if not logger: From a04a7dbc9526301361033758e73568542b03e924 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 31 Jan 2023 16:47:56 -0500 Subject: [PATCH 08/82] Update the manual install python module version to 3.7.14. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fbce2237..34f11b72 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ All clpipe commands should now be accessible. ### Manual Installation 1. Open a Longleaf terminal session -2. Switch to python 3.6.6 using `module add python/3.6.6` +2. Switch to python 3.7 using `module add python/3.7.14` 3. Install clpipe from GitHub with ```pip3 install --user --upgrade git+https://github.com/cohenlabUNC/clpipe.git``` From be465734f4f9e6ea19865a115aac2a2c111d86bc Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 31 Jan 2023 17:19:40 -0500 Subject: [PATCH 09/82] Remove milliseconds from the logger time format to shorten the line a bit. We don't need millisecond time resolution for this log file. --- clpipe/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/clpipe/utils.py b/clpipe/utils.py index e40f6175..efc2dbf4 100644 --- a/clpipe/utils.py +++ b/clpipe/utils.py @@ -92,7 +92,8 @@ def add_file_handler(log_dir: os.PathLike, f_name: str="clpipe.log", f_handler.setLevel(logging.DEBUG) # Create log format - f_format = logging.Formatter('%(asctime)s - %(username)s - %(levelname)s: %(name)s - %(message)s') + f_format = logging.Formatter('%(asctime)s - %(username)s - %(levelname)s: %(name)s - %(message)s', + datefmt="%Y-%m-%d %H:%M:%S") f_handler.setFormatter(f_format) # Add handler to the logger From 2ee201a01791bb69fb063bfe66b50261d5565ca9 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 16:40:10 -0500 Subject: [PATCH 10/82] Update requirements documentation. --- docs/install.rst | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/docs/install.rst b/docs/install.rst index dde9adb2..189798f7 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -6,6 +6,8 @@ Installation Python Environment Setup ----------------------- +clpipe requires Python v3.7 + If you have priviledges to add python packages to a system, you can install the most recent version of clpipe with: @@ -27,6 +29,22 @@ Pip will automatically install all required Python package dependencies. 
External Dependencies ----------------------- +Singularity & Images +----------------------- + +clpipe uses Singularity to run certain dependencies as images. clpipe has been +tested against: + +- Singularity == v3.2.1 + +If you are a UNC-CH Longleaf user, Singularity is made available by default when launching +jobs, so you do not need to explicitly add this dependency. + +The following programs are required as images: + +- fMRIPrep >= v20 +- BIDS-validator >= v0.0.0 + If you don't already have a Singularity image of fMRIprep, head over to their `site `_ and follow the directions. You will have to change the fMRIprep image path in @@ -35,13 +53,16 @@ your configuration file. Similarly, if you do not have a copy of the BIDS-validator Singularity image, you'll need to obtain `this image `_ as well: +Other Dependencies +----------------------- + Additionally, clpipe requires the following tools to be installed in order to run its postprocessing and analysis steps (UNC-CH Users - this is handled by the clpipe module): -- FSL (recommended v6.0.0 +) -- AFNI (recommended v20.0.00 +) -- R (v4.0.0 +) +- FSL >= v6.0.0 +- AFNI >= v20.0.00 +- R >= v4.0.0 ----------------------- A Note for UNC-CH Users From 75dc93f83534317d6c8c20355cf8485449cbc56c Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 17:18:11 -0500 Subject: [PATCH 11/82] Move the BIDS validator image reference to the BIDSValidationOptions block. --- clpipe/bids_validator.py | 14 ++++++++++---- clpipe/data/defaultConfig.json | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/clpipe/bids_validator.py b/clpipe/bids_validator.py index 7a7f72c9..2b708cf1 100644 --- a/clpipe/bids_validator.py +++ b/clpipe/bids_validator.py @@ -45,6 +45,14 @@ def bids_validate(bids_dir=None, config_file=None, log_dir=None, debug=debug) batch_manager.update_mem_usage('3000') + # Validator image moved to BIDSValidationOptions block + # If the image isn't there, try looking in the old spot for backwards compatibility + # with clpipe <= v1.7.2 + try: + bids_validator_image = config.config['BIDSValidationOptions']['BIDSValidatorImage'] + except KeyError: + bids_validator_image = config.config['PostProcessingOptions']['BIDSValidatorImage'] + singularity_string = SINGULARITY_CMD_TEMPLATE if verbose: logger.debug("Verbose mode: on") @@ -52,16 +60,14 @@ def bids_validate(bids_dir=None, config_file=None, log_dir=None, if interactive: logger.info("Running BIDS validation interactively.") os.system(singularity_string.format( - validatorInstance=config - .config['PostProcessingOptions']['BIDSValidatorImage'], + validatorInstance=bids_validator_image, bidsDir=bids_dir, bindPaths=batch_manager.config['SingularityBindPaths'] )) logger.info("Validation complete.") else: batch_manager.addjob(Job("BIDSValidator", singularity_string.format( - validatorInstance=config - .config['PostProcessingOptions']['BIDSValidatorImage'], + validatorInstance=bids_validator_image, bidsDir=config.config['FMRIPrepOptions']['BIDSDirectory'], bindPaths=batch_manager.config['SingularityBindPaths'] ))) diff --git a/clpipe/data/defaultConfig.json b/clpipe/data/defaultConfig.json index 0f76e810..12464a68 100644 --- a/clpipe/data/defaultConfig.json +++ b/clpipe/data/defaultConfig.json @@ -15,6 +15,7 @@ "LogDirectory": "" }, "BIDSValidationOptions": { + "BIDSValidatorImage": "/proj/hng/singularity_imgs/validator.simg", "LogDirectory": "" }, "FMRIPrepOptions": { @@ -70,7 +71,6 @@ "PostProcessingTimeUsage": "8:0:0", "NThreads": "1", "SpectralInterpolationBinSize": 5000, - 
"BIDSValidatorImage": "/proj/hng/singularity_imgs/validator.simg", "LogDirectory": "" }, "PostProcessingOptions2": { From 8a33a6fb8a484e89682605778bf6d1ce8021b421 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 17:22:36 -0500 Subject: [PATCH 12/82] Add dcm2niixOptions line in the default conversion config to set dcm2niix's maximum search depth. --- clpipe/data/defaultConvConfig.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/clpipe/data/defaultConvConfig.json b/clpipe/data/defaultConvConfig.json index c0ac9680..49024e9f 100644 --- a/clpipe/data/defaultConvConfig.json +++ b/clpipe/data/defaultConvConfig.json @@ -12,5 +12,6 @@ "ImageType": ["ORIG*", "PRIMARY", "M", "ND", "MOSAIC"] } } - ] + ], + "dcm2niixOptions": "-b y -ba y -z y -f '%3s_%f_%p_%t' -d 9" } From fe2abbb10ff90f7c459b1a36827347a0644b8db7 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 17:26:54 -0500 Subject: [PATCH 13/82] Add defaults in the conversion config for anatomical and field map images to provide more examples. --- clpipe/data/defaultConvConfig.json | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/clpipe/data/defaultConvConfig.json b/clpipe/data/defaultConvConfig.json index 49024e9f..1f8ad522 100644 --- a/clpipe/data/defaultConvConfig.json +++ b/clpipe/data/defaultConvConfig.json @@ -1,5 +1,12 @@ { "descriptions": [ + { + "dataType": "anat", + "modalityLabel": "T1w", + "criteria": { + "SeriesDescription": "*t1*" + } + }, { "dataType": "func", "modalityLabel": "bold", @@ -11,7 +18,16 @@ "SeriesDescription":"*_srt", "ImageType": ["ORIG*", "PRIMARY", "M", "ND", "MOSAIC"] } - } + }, + { + "dataType": "fmap", + "modalityLabel": "epi", + "customLabels": "dir-AP", + "criteria": { + "SeriesDescription": "*AP*" + }, + "intendedFor": [1] + } ], "dcm2niixOptions": "-b y -ba y -z y -f '%3s_%f_%p_%t' -d 9" } From ae9ed3576681f1991bdd6d11b11c652ae54b836d Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 17:27:39 -0500 Subject: [PATCH 14/82] Remove "image type" from default conversion config func example - usually is not necessary. --- clpipe/data/defaultConvConfig.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/clpipe/data/defaultConvConfig.json b/clpipe/data/defaultConvConfig.json index 1f8ad522..a9d720b1 100644 --- a/clpipe/data/defaultConvConfig.json +++ b/clpipe/data/defaultConvConfig.json @@ -15,8 +15,7 @@ "TaskName": "srt" }, "criteria": { - "SeriesDescription":"*_srt", - "ImageType": ["ORIG*", "PRIMARY", "M", "ND", "MOSAIC"] + "SeriesDescription":"*_srt" } }, { From eca751c5a35567e78ca4b3d28d614092768e17d9 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 17:36:04 -0500 Subject: [PATCH 15/82] Stack some list options to make them easier to read. 
--- clpipe/data/defaultConfig.json | 55 +++++++++++++++++++++++++++------- 1 file changed, 44 insertions(+), 11 deletions(-) diff --git a/clpipe/data/defaultConfig.json b/clpipe/data/defaultConfig.json index 12464a68..dfb3063f 100644 --- a/clpipe/data/defaultConfig.json +++ b/clpipe/data/defaultConfig.json @@ -4,6 +4,15 @@ "ProjectDirectory": "", "EmailAddress": "", "TempDirectory": "", + "SourceOptions": { + "SourceURL": "", + "DropoffDirectory": "", + "TempDirectory": "", + "CommandLineOpts": "", + "TimeUsage": "1:0:0", + "MemUsage": "10000", + "CoreUsage": "1" + }, "DICOMToBIDSOptions": { "DICOMDirectory": "", "BIDSDirectory": "", @@ -28,7 +37,13 @@ "CommandLineOpts": "", "TemplateFlowToggle": true, "TemplateFlowPath": "", - "TemplateFlowTemplates": ["MNI152NLin2009cAsym", "MNI152NLin6Asym", "OASIS30ANTs", "MNIPediatricAsym", "MNIInfant"], + "TemplateFlowTemplates": [ + "MNI152NLin2009cAsym", + "MNI152NLin6Asym", + "OASIS30ANTs", + "MNIPediatricAsym", + "MNIInfant" + ], "FMapCleanupROIs": 3, "FMRIPrepMemoryUsage": "50000", "FMRIPrepTimeUsage": "16:0:0", @@ -45,14 +60,26 @@ "ConfoundSuffix": "desc-confounds_regressors.tsv", "DropCSV": "", "Regress": true, - "Confounds": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z", - "csf", "white_matter", "global_signal", "a_comp_cor.*"], - "ConfoundsQuad": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z", - "csf", "white_matter", "global_signal"], - "ConfoundsDerive": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z", - "csf", "white_matter", "global_signal"], - "ConfoundsQuadDerive": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z", - "csf", "white_matter", "global_signal"], + "Confounds": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z", + "csf", "white_matter", "global_signal", "a_comp_cor.*" + ], + "ConfoundsQuad": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z", + "csf", "white_matter", "global_signal" + ], + "ConfoundsDerive": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z", + "csf", "white_matter", "global_signal" + ], + "ConfoundsQuadDerive": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z", + "csf", "white_matter", "global_signal" + ], "FilteringHighPass": 0.008, "FilteringLowPass": -1, "FilteringOrder": 2, @@ -65,7 +92,10 @@ "ScrubBehind": 1, "ScrubContig": 2, "RespNotchFilter": true, - "MotionVars": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"], + "MotionVars": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z" + ], "RespNotchFilterBand":[0.31,0.43], "PostProcessingMemoryUsage": "20000", "PostProcessingTimeUsage": "8:0:0", @@ -116,7 +146,10 @@ } }, "ConfoundOptions": { - "Columns": ["csf", "csf_derivative1", "white_matter", "white_matter_derivative1"], + "Columns": [ + "csf", "csf_derivative1", + "white_matter", "white_matter_derivative1" + ], "MotionOutliers": { "Include": true, "ScrubVar": "framewise_displacement", From 4f4c846b61a7931030b6c9dab782b39d5dfa86c1 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 1 Feb 2023 17:41:15 -0500 Subject: [PATCH 16/82] Remove no longer used Python2 path. 
--- clpipe/data/defaultGLMConfig.json | 1 - 1 file changed, 1 deletion(-) diff --git a/clpipe/data/defaultGLMConfig.json b/clpipe/data/defaultGLMConfig.json index 35531edb..b1a03106 100644 --- a/clpipe/data/defaultGLMConfig.json +++ b/clpipe/data/defaultGLMConfig.json @@ -8,7 +8,6 @@ "TargetSuffix": "space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz", "WorkingDirectory": "FILL THIS IN", "TaskName": "gng_example", - "Python2Path": "/nas/longleaf/apps/python/2.7.12/bin/python", "ReferenceImage": "SET REFERENCE", "DummyScans": 0, "ApplyFMRIPREPMask": true, From bb5ec81b0ba92783eda2a8803c0c496973a9bbc2 Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Wed, 1 Feb 2023 21:02:17 -0500 Subject: [PATCH 17/82] fixed permissions of clpipe.log --- clpipe/project_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index e45dd597..99f4b8f8 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -21,7 +21,7 @@ def project_setup(project_title=None, project_dir=None, org_source = os.path.abspath(source_data) add_file_handler(os.path.join(project_dir, "logs")) - # os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IRWXU) + os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IWGRP) logger = get_logger(STEP_NAME, debug=debug) org_source = os.path.abspath(source_data) From 41b9636a56d453c182a90de558a7e762960e1dd5 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 2 Feb 2023 13:53:40 -0500 Subject: [PATCH 18/82] Clean up list spacing on the default GLM file a bit. --- clpipe/data/defaultGLMConfig.json | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/clpipe/data/defaultGLMConfig.json b/clpipe/data/defaultGLMConfig.json index b1a03106..9128cb9b 100644 --- a/clpipe/data/defaultGLMConfig.json +++ b/clpipe/data/defaultGLMConfig.json @@ -22,8 +22,14 @@ "OutputSuffix": "resampled.nii.gz", "PrepareConfounds": true, "ConfoundSuffix": "desc-confounds_regressors.tsv", - "Confounds": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"], - "ConfoundsDerive": ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"], + "Confounds": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z" + ], + "ConfoundsDerive": [ + "trans_x", "trans_y", "trans_z", + "rot_x", "rot_y", "rot_z" + ], "ConfoundsQuad": [], "ConfoundsQuadDerive": [], "MotionOutliers": true, @@ -56,10 +62,11 @@ "FSFDir": "./fsf_test", "EVDirectory": "./EV_test", "ConfoundDirectory": "./confound_test", - "EVFileSuffices": ["gohit.txt", - "goomission.txt", - "comission.txt", - "correct.txt" + "EVFileSuffices": [ + "gohit.txt", + "goomission.txt", + "comission.txt", + "correct.txt" ], "ConfoundSuffix": "confounds.tsv", "OutputDir": "./test_l1feat", @@ -74,7 +81,8 @@ } ], "Level2Setups": [ - { "ModelName": "tworun_gngreg", + { + "ModelName": "tworun_gngreg", "FSFPrototype": "", "SubjectFile": "l2_sublist.csv", "FSFDir": "", @@ -88,6 +96,5 @@ "BatchConfig": "slurmUNCConfig.json" } } - ] } \ No newline at end of file From 99739c7ab4656d2c1d78291d7bb65eec1bd328c8 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 2 Feb 2023 13:53:56 -0500 Subject: [PATCH 19/82] Get rid of unused click import. 
--- clpipe/project_setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index d0320424..939c3808 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -1,5 +1,4 @@ import os -import click from .config_json_parser import ClpipeConfigParser from pkg_resources import resource_stream import json From 7513bc576dc7c179729b263f1a735ba6e1867369 Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Mon, 6 Feb 2023 13:21:40 -0500 Subject: [PATCH 20/82] removed redundant lines in project_setup --- clpipe/project_setup.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index d1fae6cc..8695a670 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -35,16 +35,6 @@ def project_setup(project_title=None, project_dir=None, config = config_parser.config - # Create the project directory - if not os.path.exists(project_dir): - os.makedirs(project_dir) - - bids_dir = config['DICOMToBIDSOptions']['BIDSDirectory'] - project_dir = config['ProjectDirectory'] - conv_config_path = config['DICOMToBIDSOptions']['ConversionConfig'] - - config = config_parser.config - # Create the project directory os.makedirs(project_dir, exist_ok=True) logger.info(f"Created project directory at: {project_dir}") From ee097c8d046244c98c4a23995976b09c0a60f46d Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 8 Feb 2023 13:44:14 -0500 Subject: [PATCH 21/82] Replace all instances of shutil.copy with shutil.copyfile. shutil.copy uses chmod under the hood to also copy the permissions of a file - this causes headaches when using clpipe in a multi-user environment. Uses of shutil.copy in testing files are preserved because they are only for a single user. Usage of shutil.copy2 in one file ignored to preserve its metadata copying capability. 
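For context on the distinction: shutil.copy is shutil.copyfile plus a copy of
the source file's permission bits onto the destination, which is exactly what
caused trouble in shared project directories. A minimal sketch (paths
hypothetical):

    import shutil

    # shutil.copy copies contents AND the source's permission mode
    # (it runs shutil.copymode, i.e. os.chmod, on the destination).
    shutil.copy("data/l2_sublist.csv", "project/l2_sublist.csv")

    # shutil.copyfile copies contents only; the destination's permissions
    # follow the creating user's umask, so group members keep access.
    shutil.copyfile("data/l2_sublist.csv", "project/l2_sublist.csv")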
--- clpipe/config_json_parser.py | 2 +- clpipe/get_reports.py | 2 +- clpipe/glm_l2.py | 4 ++-- clpipe/glm_prepare.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/clpipe/config_json_parser.py b/clpipe/config_json_parser.py index 1e213ea2..18c7028a 100644 --- a/clpipe/config_json_parser.py +++ b/clpipe/config_json_parser.py @@ -127,7 +127,7 @@ def setup_glm(self, project_path): os.mkdir(os.path.join(project_path, "l2_gfeat_folders")) glm_config.config_json_dump(project_path, "glm_config.json") - shutil.copy(resource_filename('clpipe', 'data/l2_sublist.csv'), os.path.join(project_path, "l2_sublist.csv")) + shutil.copyfile(resource_filename('clpipe', 'data/l2_sublist.csv'), os.path.join(project_path, "l2_sublist.csv")) glm_config.config['GLMSetupOptions']['LogDirectory'] = os.path.join(project_path, "logs", "glm_setup_logs") os.mkdir(os.path.join(project_path, "logs", "glm_setup_logs")) diff --git a/clpipe/get_reports.py b/clpipe/get_reports.py index 041d9d1d..7b01a469 100644 --- a/clpipe/get_reports.py +++ b/clpipe/get_reports.py @@ -45,7 +45,7 @@ def get_reports(config_file, output_name, debug, clear_temp=True): logger.info(f"Copying reports...") for report in images: - shutil.copy(report, + shutil.copyfile(report, os.path.join(config.config['FMRIPrepOptions']['WorkingDirectory'], 'reports_temp', 'fmriprep_reports', os.path.basename(report))) diff --git a/clpipe/glm_l2.py b/clpipe/glm_l2.py index 10e606f1..62b6a3c3 100644 --- a/clpipe/glm_l2.py +++ b/clpipe/glm_l2.py @@ -144,13 +144,13 @@ def _apply_mumford_workaround(l1_feat_folder, logger, remove_reg_standard=False) logger.debug(( f"Copying identity matrix {identity_matrix_path}" f" to {func_to_standard_path}")) - shutil.copy(identity_matrix_path, func_to_standard_path) + shutil.copyfile(identity_matrix_path, func_to_standard_path) # Copy in the mean_func image as the reg folder standard, # imitating multiplication with the identity matrix. logger.debug( f"Copying mean func image {mean_func_path} to {standard_path}") - shutil.copy(mean_func_path, standard_path) + shutil.copyfile(mean_func_path, standard_path) except FileNotFoundError as e: print(e, "- skipping") diff --git a/clpipe/glm_prepare.py b/clpipe/glm_prepare.py index 4578bdec..8e28b601 100644 --- a/clpipe/glm_prepare.py +++ b/clpipe/glm_prepare.py @@ -295,13 +295,13 @@ def _apply_mumford_workaround(l1_feat_folder, logger, remove_reg_standard=False) logger.debug(( f"Copying identity matrix {identity_matrix_path}" f" to {func_to_standard_path}")) - shutil.copy(identity_matrix_path, func_to_standard_path) + shutil.copyfile(identity_matrix_path, func_to_standard_path) # Copy in the mean_func image as the reg folder standard, # imitating multiplication with the identity matrix. logger.debug( f"Copying mean func image {mean_func_path} to {standard_path}") - shutil.copy(mean_func_path, standard_path) + shutil.copyfile(mean_func_path, standard_path) except FileNotFoundError as e: print(e, "- skipping") From df16cbaa0c524752ccc05820a6779a2c6829f51f Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 8 Feb 2023 13:44:14 -0500 Subject: [PATCH 22/82] Fix an issue where FSFFileNotFoundError was being caught instead of FileNotFoundError, causing the program to stop when not intended. Also improve the log messages a bit for clarity and include an error counter. 
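In schematic form (names illustrative, not the exact clpipe code), the fix
means one missing FEAT directory is logged and counted instead of aborting
the whole propagation loop:

    error_count = 0
    for fsf in fsf_names:
        try:
            build_l2_fsf(fsf)                 # hypothetical helper
        except FileNotFoundError as err:      # was: except FSFFileNotFoundError
            error_count += 1
            logger.warning(err)

    suffix = f" with {error_count} error(s)" if error_count > 0 else ""
    logger.info(f"Job completed{suffix}")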
--- clpipe/glm_prepare.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/clpipe/glm_prepare.py b/clpipe/glm_prepare.py index 8e28b601..686be39e 100644 --- a/clpipe/glm_prepare.py +++ b/clpipe/glm_prepare.py @@ -203,16 +203,19 @@ def _glm_l2_propagate(l2_block, glm_setup_options, logger): if not os.path.exists(l2_block['FSFDir']): os.mkdir(l2_block['FSFDir']) + error_count = 0 + logger.info("Propogating fsf files...") for fsf in fsf_names: try: new_fsf = fsf_file_template target_dirs = sub_tab.loc[sub_tab["fsf_name"] == fsf].feat_folders counter = 1 - logger.info("Creating " + fsf) + logger.info("Creating L2 FEAT directory for: " + fsf) for feat in target_dirs: if not os.path.exists(feat): - raise FileNotFoundError("Cannot find "+ feat) + error_count += 1 + raise FileNotFoundError("ERROR: Could not find L1 FEAT directory: "+ feat) else: _apply_mumford_workaround(feat, logger, remove_reg_standard=True) new_fsf[image_files_ind[counter - 1]] = "set feat_files(" + str(counter) + ") \"" + os.path.abspath( @@ -230,9 +233,15 @@ def _glm_l2_propagate(l2_block, glm_setup_options, logger): with open(out_fsf, "w") as fsf_file: fsf_file.writelines(new_fsf) - except FSFFileNotFoundError as err: + except FileNotFoundError as err: logger.warn(err) + error_msg = "" + if error_count > 0: + error_msg = f" with {error_count} error(s)" + + logger.info(f"Job completed{error_msg}") + def glm_apply_mumford_workaround(glm_config_file=None, l1_feat_folders_path=None, From d10269c3a7b690ee249b6e2002273025462e77ba Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 8 Feb 2023 13:44:14 -0500 Subject: [PATCH 23/82] Reroute the legacy glm_l1_propagate and glm_l2_propagate commands to the new glm_prepare module. --- clpipe/cli.py | 12 ++-- clpipe/glm_l1.py | 125 ------------------------------------- clpipe/glm_l2.py | 159 ----------------------------------------------- 3 files changed, 6 insertions(+), 290 deletions(-) delete mode 100644 clpipe/glm_l1.py delete mode 100644 clpipe/glm_l2.py diff --git a/clpipe/cli.py b/clpipe/cli.py index 9701c42d..b487704c 100644 --- a/clpipe/cli.py +++ b/clpipe/cli.py @@ -429,9 +429,9 @@ def glm_l1_preparefsf_cli(glm_config_file, l1_name, debug): You must create a template .fsf file in FSL's FEAT GUI first. """ - from .glm_l1 import glm_l1_preparefsf - glm_l1_preparefsf( - glm_config_file=glm_config_file, l1_name=l1_name, debug=debug) + from .glm_prepare import glm_prepare + glm_prepare(glm_config_file=glm_config_file, level="L1", model=l1_name, + debug=debug) @click.command(L2_PREPARE_FSF_COMMAND_NAME, no_args_is_help=True) @@ -446,9 +446,9 @@ def glm_l2_preparefsf_cli(glm_config_file, l2_name, debug): You must create a group-level template .fsf file in FSL's FEAT GUI first. 
""" - from .glm_l2 import glm_l2_preparefsf - glm_l2_preparefsf(glm_config_file=glm_config_file, l2_name=l2_name, - debug=debug) + from .glm_prepare import glm_prepare + glm_prepare(glm_config_file=glm_config_file, level="L2", model=l2_name, + debug=debug) @click.command(APPLY_MUMFORD_COMMAND_NAME, no_args_is_help=True) diff --git a/clpipe/glm_l1.py b/clpipe/glm_l1.py deleted file mode 100644 index 5e163411..00000000 --- a/clpipe/glm_l1.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -Design - -Step 1: Load in prototype, identify all needed lines -Step 2: Load in paths for all image files, exclude/include as needed -Step 3: Run through all required image files, construct the file changes -Step 3a: Change prototype and spit out into fsf output folder -Step 4: Launch a feat job for each fsf -""" - -import os -import glob -import logging -import sys -import nibabel as nib - -from .config_json_parser import GLMConfigParser -from .error_handler import exception_handler - - -def glm_l1_preparefsf(glm_config_file=None, l1_name=None, debug=None): - if not debug: - sys.excepthook = exception_handler - logging.basicConfig(level=logging.INFO) - else: - logging.basicConfig(level=logging.DEBUG) - glm_config = GLMConfigParser(glm_config_file) - - l1_block = [x for x in glm_config.config['Level1Setups'] if x['ModelName'] == str(l1_name)] - if len(l1_block) is not 1: - raise ValueError("L1 model not found, or multiple entries found.") - - l1_block = l1_block[0] - glm_setup_options = glm_config.config['GLMSetupOptions'] - - _glm_l1_propagate(l1_block, glm_setup_options) - - -def _glm_l1_propagate(l1_block, glm_setup_options): - with open(l1_block['FSFPrototype']) as f: - fsf_file_template=f.readlines() - - output_ind = [i for i,e in enumerate(fsf_file_template) if "set fmri(outputdir)" in e] - image_files_ind = [i for i,e in enumerate(fsf_file_template) if "set feat_files" in e] - ev_file_inds = [i for i,e in enumerate(fsf_file_template) if "set fmri(custom" in e] - confound_file_ind = [i for i,e in enumerate(fsf_file_template) if "set confoundev_files(1)" in e] - regstandard_ind = [i for i, e in enumerate(fsf_file_template) if "set fmri(regstandard)" in e] - tps_inds = [i for i, e in enumerate(fsf_file_template) if "set fmri(npts)" in e] - if l1_block['ImageIncludeList'] is not "" and l1_block['ImageExcludeList'] is not "": - raise ValueError("Only one of ImageIncludeList and ImageExcludeList should be non-empty") - - image_files = glob.glob(os.path.join(l1_block['TargetDirectory'], "**", "*"+l1_block['TargetSuffix']), recursive = True) - - if l1_block['ImageIncludeList'] is not "": - image_files = [file_path for file_path in image_files if os.path.basename(file_path) in l1_block['ImageIncludeList']] - base_names = [os.path.basename(file_path) for file_path in image_files] - - files_not_found = [file for file in base_names if file not in l1_block['ImageIncludeList']] - if len(files_not_found): - logging.warning("Did not find the following files: " + str(files_not_found)) - - if l1_block['ImageExcludeList'] is not "": - image_files = [file_path for file_path in image_files if - os.path.basename(file_path) not in l1_block['ImageExcludeList']] - - image_files = [file for file in image_files if - "task-" + glm_setup_options["TaskName"] in file] - - if not os.path.exists(l1_block['FSFDir']): - os.mkdir(l1_block['FSFDir']) - for file in image_files: - try: - logging.info("Creating FSF File for " + file) - img_data = nib.load(file) - total_tps = img_data.shape[3] - ev_conf = _get_ev_confound_mat(file, l1_block) - 
out_dir = os.path.join(l1_block['OutputDir'],os.path.basename(file).replace(l1_block["TargetSuffix"], ".feat")) - out_fsf = os.path.join(l1_block['FSFDir'], - os.path.basename(file).replace(l1_block["TargetSuffix"], ".fsf")) - new_fsf = fsf_file_template - - new_fsf[tps_inds[0]] = "set fmri(npts) " + str(total_tps) + "\n" - new_fsf[output_ind[0]] = "set fmri(outputdir) \"" + os.path.abspath(out_dir) + "\"\n" - new_fsf[image_files_ind[0]] = "set feat_files(1) \"" + os.path.abspath(file) + "\"\n" - - if glm_setup_options['ReferenceImage'] is not "": - new_fsf[regstandard_ind[0]] = "set fmri(regstandard) \"" + os.path.abspath(glm_setup_options['ReferenceImage']) + "\"\n" - if l1_block['ConfoundSuffix'] is not "": - new_fsf[confound_file_ind[0]] = "set confoundev_files(1) \"" + os.path.abspath(ev_conf['Confounds'][0]) + "\"\n" - - for i, e in enumerate(ev_conf['EVs']): - new_fsf[ev_file_inds[i]] = "set fmri(custom" + str(i +1) + ") \"" + os.path.abspath(e) + "\"\n" - - - - with open(out_fsf, "w") as fsf_file: - fsf_file.writelines(new_fsf) - - except Exception as err: - logging.exception(err) - - -def _get_ev_confound_mat(file_name, l1_block): - - file_prefix = os.path.basename(file_name).replace(l1_block["TargetSuffix"], "") - EV_files = [glob.glob(os.path.join(l1_block["EVDirectory"],"**",file_prefix + EV), recursive=True) for EV in l1_block['EVFileSuffices']] - EV_files = [item for sublist in EV_files for item in sublist] - - if len(EV_files) is not len(l1_block['EVFileSuffices']): - raise FileNotFoundError(( - f"Did not find enough EV files for scan {file_name}. " - f"Only found {len(EV_files)} and need " - f"{len(l1_block['EVFileSuffices'])}" - )) - - if l1_block["ConfoundSuffix"] is not "": - confound_file = glob.glob(os.path.join(l1_block["ConfoundDirectory"],"**",file_prefix + l1_block['ConfoundSuffix']), recursive = True) - if len(confound_file) is not 1: - raise FileNotFoundError("Did not find a confound file for this scan") - return {"EVs": EV_files, "Confounds": confound_file} - - return {"EVs": EV_files} - - - diff --git a/clpipe/glm_l2.py b/clpipe/glm_l2.py deleted file mode 100644 index 62b6a3c3..00000000 --- a/clpipe/glm_l2.py +++ /dev/null @@ -1,159 +0,0 @@ -import os -import logging -import shutil -from pathlib import Path -import pandas as pd - -from .config import * -from .config_json_parser import GLMConfigParser -from .utils import get_logger - - -def glm_l2_preparefsf(glm_config_file=None, l2_name=None, debug=None): - logger = get_logger(L1_PREPARE_FSF_COMMAND_NAME, debug=debug) - - glm_config = GLMConfigParser(glm_config_file) - - l2_block = [x for x in glm_config.config['Level2Setups'] \ - if x['ModelName'] == str(l2_name)] - if len(l2_block) is not 1: - raise ValueError("L2 model not found, or multiple entries found.") - - l2_block = l2_block[0] - glm_setup_options = glm_config.config['GLMSetupOptions'] - - _glm_l2_propagate(l2_block, glm_setup_options, logger) - - -def _glm_l2_propagate(l2_block, glm_setup_options, logger): - subject_file = l2_block['SubjectFile'] - prototype_file = l2_block['FSFPrototype'] - - logger.info(f"Reading subject file: {subject_file}") - sub_tab = pd.read_csv(subject_file) - - logger.info(f"Opening prototype file: {prototype_file}") - with open(prototype_file) as f: - fsf_file_template=f.readlines() - - output_ind = [ - i for i,e in enumerate(fsf_file_template) if "set fmri(outputdir)" in e - ] - image_files_ind = [ - i for i,e in enumerate(fsf_file_template) if "set feat_files" in e - ] - regstandard_ind = [ - i for i, e in 
enumerate(fsf_file_template) if "set fmri(regstandard)" in e - ] - - sub_tab = sub_tab.loc[sub_tab['L2_name'] == l2_block['ModelName']] - - fsf_names = sub_tab.fsf_name.unique() - - if not os.path.exists(l2_block['FSFDir']): - os.mkdir(l2_block['FSFDir']) - - for fsf in fsf_names: - try: - new_fsf = fsf_file_template - target_dirs = sub_tab.loc[sub_tab["fsf_name"] == fsf].feat_folders - counter = 1 - logger.info("Creating " + fsf) - for feat in target_dirs: - if not os.path.exists(feat): - raise FileNotFoundError("Cannot find "+ feat) - else: - _apply_mumford_workaround(feat, logger, remove_reg_standard=True) - new_fsf[image_files_ind[counter - 1]] = "set feat_files(" + str(counter) + ") \"" + os.path.abspath( - feat) + "\"\n" - counter = counter + 1 - - out_dir = os.path.join(l2_block['OutputDir'], fsf + ".gfeat") - new_fsf[output_ind[0]] = "set fmri(outputdir) \"" + os.path.abspath(out_dir) + "\"\n" - out_fsf = os.path.join(l2_block['FSFDir'], - fsf + ".fsf") - - if glm_setup_options['ReferenceImage'] is not "": - new_fsf[regstandard_ind[0]] = "set fmri(regstandard) \"" + os.path.abspath(glm_setup_options['ReferenceImage']) + "\"\n" - - with open(out_fsf, "w") as fsf_file: - fsf_file.writelines(new_fsf) - - except Exception as err: - logging.exception(err) - - -def glm_apply_mumford_workaround(glm_config_file=None, - l1_feat_folders_path=None, - remove_reg_standard=False, - debug=False): - - logger = get_logger(APPLY_MUMFORD_COMMAND_NAME, debug=debug) - if glm_config_file: - glm_config = GLMConfigParser(glm_config_file).config - l1_feat_folders_path = glm_config["Level1Setups"]["OutputDir"] - - logger.info(f"Applying Mumford workaround to: {l1_feat_folders_path}") - for l1_feat_folder in os.scandir(l1_feat_folders_path): - if os.path.isdir(l1_feat_folder): - logger.info(f"Processing L1 FEAT folder: {l1_feat_folder.path}") - _apply_mumford_workaround(l1_feat_folder, logger, - remove_reg_standard=remove_reg_standard) - - logger.info(f"Finished applying Mumford workaround.") - - -def _apply_mumford_workaround(l1_feat_folder, logger, remove_reg_standard=False): - """ - When using an image registration other than FSL's, such as fMRIPrep's, - this work-around is necessary to run FEAT L2 analysis in FSL. 
- - See: https://mumfordbrainstats.tumblr.com/post/166054797696/ - feat-registration-workaround - """ - l1_feat_folder = Path(l1_feat_folder) - l1_feat_reg_folder = l1_feat_folder / "reg" - - # Create the reg directory if it doesn't exist - # This happens if FEAT's preprocessing was not used - if not l1_feat_reg_folder.exists(): - l1_feat_reg_folder.mkdir() - else: - # Remove all of the .mat files in the reg folder - for mat in l1_feat_reg_folder.glob("*.mat"): - logger.debug(f"Removing: {mat}") - os.remove(mat) - - if remove_reg_standard: - # Delete the reg_standard folder if it exists - reg_standard_path = l1_feat_folder / "reg_standard" - if reg_standard_path.exists(): - logger.debug(f"Removing: {reg_standard_path}") - shutil.rmtree(reg_standard_path) - - try: - # Grab the FSLDIR environment var to get path to standard matrices - fsl_dir = Path(os.environ["FSLDIR"]) - identity_matrix_path = fsl_dir / 'etc/flirtsch/ident.mat' - func_to_standard_path = \ - l1_feat_reg_folder / "example_func2standard.mat" - mean_func_path = l1_feat_folder / 'mean_func.nii.gz' - standard_path = l1_feat_reg_folder / "standard.nii.gz" - - # Copy over the standard identity matrix - logger.debug(( - f"Copying identity matrix {identity_matrix_path}" - f" to {func_to_standard_path}")) - shutil.copyfile(identity_matrix_path, func_to_standard_path) - - # Copy in the mean_func image as the reg folder standard, - # imitating multiplication with the identity matrix. - logger.debug( - f"Copying mean func image {mean_func_path} to {standard_path}") - shutil.copyfile(mean_func_path, standard_path) - except FileNotFoundError as e: - print(e, "- skipping") - - - - From a5500cacea8d754f26f6868803c345309f825419 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 8 Feb 2023 13:59:59 -0500 Subject: [PATCH 24/82] Add "scans.json" to default .bidsignore file. --- clpipe/config_json_parser.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/clpipe/config_json_parser.py b/clpipe/config_json_parser.py index 18c7028a..eeceaaba 100644 --- a/clpipe/config_json_parser.py +++ b/clpipe/config_json_parser.py @@ -199,7 +199,9 @@ def setup_dcm2bids(self, dicom_directory, heuristic_file, output_directory, dico if not os.path.exists(bids_ignore_path): with open(bids_ignore_path, 'w') as bids_ignore_file: # Ignore dcm2bid's auto-generated directory - bids_ignore_file.write("tmp_dcm2bids") + bids_ignore_file.write("tmp_dcm2bids\n") + # Ignore heudiconv's auto-generated scan file + bids_ignore_file.write("scans.json\n") def setup_bids_validation(self, log_dir=None): if log_dir is not None: From 5fce5a7cbe793fa25a398a882742c19ddc4c1e91 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 11:37:56 -0500 Subject: [PATCH 25/82] Fix the reference to the "open" command when opening a custom batch config file. 
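The original call failed because os.open is the low-level POSIX wrapper: it
requires a flags argument (e.g. os.O_RDONLY) and returns a bare integer file
descriptor, which has no __enter__/__exit__ and so cannot be used in a `with`
statement. The builtin open is what this code needs; a minimal sketch:

    import json
    import os

    # with os.open(path, os.O_RDONLY) as f:  ->  AttributeError (int fd)
    with open(os.path.abspath("batch_config.json")) as bat_config:
        config = json.load(bat_config)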
--- clpipe/batch_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/batch_manager.py b/clpipe/batch_manager.py index c0fb9ffe..ea0834ee 100644 --- a/clpipe/batch_manager.py +++ b/clpipe/batch_manager.py @@ -27,7 +27,7 @@ def __init__(self, batch_system_config: os.PathLike, output_directory=None, if os.path.exists(os.path.abspath(batch_system_config)): self.logger.debug(f"Using batch config at: {batch_system_config}") - with os.open(os.path.abspath(batch_system_config)) as bat_config: + with open(os.path.abspath(batch_system_config)) as bat_config: self.config = json.load(bat_config) else: with resource_stream( From 812efc9c5094370f7d53bc4de8930bd82eb86fe1 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 16:35:18 -0500 Subject: [PATCH 26/82] Remove redundant hard coded reference to the fmriprep dir. This is taken care of in "resolve_fmriprep_dir" --- clpipe/get_reports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/get_reports.py b/clpipe/get_reports.py index 7b01a469..71fab837 100644 --- a/clpipe/get_reports.py +++ b/clpipe/get_reports.py @@ -41,7 +41,7 @@ def get_reports(config_file, output_name, debug, clear_temp=True): copy_tree(os.path.join(ses, 'figures'), os.path.join(config.config['FMRIPrepOptions']['WorkingDirectory'], 'reports_temp', 'fmriprep_reports', os.path.basename(sub),os.path.basename(ses), 'figures')) - images = glob.glob(os.path.join(fmriprepdir, 'fmriprep', '*.html')) + images = glob.glob(os.path.join(fmriprepdir, '*.html')) logger.info(f"Copying reports...") for report in images: From 01dc5f4359e485c92f511c7a78bf0d933649ff6e Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 17:55:09 -0500 Subject: [PATCH 27/82] Reorganize test files into folders by test type. Clean up the test_setup_configuration module. 
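With the tests grouped by type, one suite can be run in isolation, e.g.
through pytest's Python entry point (directory names taken from the renames
below):

    import pytest

    # Run only the setup tests; swap in "tests/postprocess2" for the
    # postprocessing suite.
    raise SystemExit(pytest.main(["tests/setup", "-v"]))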
--- tests/{ => postprocess2}/nipype.cfg | 0 tests/{ => postprocess2}/test_postprocess2.py | 0 .../test_postprocess2_func.py | 0 .../test_postprocess2_wfs.py | 0 tests/{ => setup}/test_setup_configuration.py | 67 ++++++++++--------- 5 files changed, 34 insertions(+), 33 deletions(-) rename tests/{ => postprocess2}/nipype.cfg (100%) rename tests/{ => postprocess2}/test_postprocess2.py (100%) rename tests/{ => postprocess2}/test_postprocess2_func.py (100%) rename tests/{ => postprocess2}/test_postprocess2_wfs.py (100%) rename tests/{ => setup}/test_setup_configuration.py (51%) diff --git a/tests/nipype.cfg b/tests/postprocess2/nipype.cfg similarity index 100% rename from tests/nipype.cfg rename to tests/postprocess2/nipype.cfg diff --git a/tests/test_postprocess2.py b/tests/postprocess2/test_postprocess2.py similarity index 100% rename from tests/test_postprocess2.py rename to tests/postprocess2/test_postprocess2.py diff --git a/tests/test_postprocess2_func.py b/tests/postprocess2/test_postprocess2_func.py similarity index 100% rename from tests/test_postprocess2_func.py rename to tests/postprocess2/test_postprocess2_func.py diff --git a/tests/test_postprocess2_wfs.py b/tests/postprocess2/test_postprocess2_wfs.py similarity index 100% rename from tests/test_postprocess2_wfs.py rename to tests/postprocess2/test_postprocess2_wfs.py diff --git a/tests/test_setup_configuration.py b/tests/setup/test_setup_configuration.py similarity index 51% rename from tests/test_setup_configuration.py rename to tests/setup/test_setup_configuration.py index 7d1d9140..ce678e6e 100644 --- a/tests/test_setup_configuration.py +++ b/tests/setup/test_setup_configuration.py @@ -1,12 +1,9 @@ -import sys -sys.path.append('../clpipe') - import pytest from pathlib import Path -@pytest.mark.skip(reason="To Fix") + def test_setup_project_missing(clpipe_dir, project_paths): - + """Check if any expected clpipe setup fails to create any expect folders or files.""" missing = project_paths for path in clpipe_dir.glob('**/**/*'): @@ -16,9 +13,9 @@ def test_setup_project_missing(clpipe_dir, project_paths): assert len(missing) == 0, f"Missing expected paths: {missing}" -@pytest.mark.skip(reason="Not implemented") + def test_setup_project_extra(clpipe_dir, project_paths): - + """Check to see if clpipe setup creates any extra, unexpected folders or files.""" extra = [] for path in clpipe_dir.glob('**/**/*'): @@ -31,44 +28,48 @@ def test_setup_project_extra(clpipe_dir, project_paths): @pytest.fixture() def project_paths(): + # TODO: We should eventually just pull these constants from central config + + data_BIDS = Path("data_BIDS") + data_postproc = Path("data_postproc") + data_ROI_ts = Path("data_ROI_ts") + logs = Path("logs") + """List of expected relative project paths. 
Path is used over strings for os abstraction.""" return [ Path("analyses"), Path("data_DICOMs"), - Path("data_BIDS"), - Path('data_BIDS/code'), - Path('data_BIDS/derivatives'), - Path('data_BIDS/sourcedata'), - Path('data_BIDS/dataset_description.json'), - Path('data_BIDS/participants.json'), - Path('data_BIDS/participants.tsv'), - Path('data_BIDS/README'), - Path('data_BIDS/CHANGES'), + data_BIDS, + data_BIDS / 'code', + data_BIDS / 'derivatives', + data_BIDS / 'sourcedata', + data_BIDS / 'dataset_description.json', + data_BIDS / 'participants.json', + data_BIDS / 'participants.tsv', + data_BIDS / 'README', + data_BIDS / 'CHANGES', + data_BIDS / '.bidsignore', Path("data_fmriprep"), Path("data_GLMPrep"), Path("data_onsets"), - Path("data_postproc"), - Path("data_postproc", "betaseries_default"), - Path("data_postproc", "betaseries_noGSR"), - Path("data_postproc", "betaseries_noScrub"), - Path("data_postproc", "postproc_default"), - Path("data_postproc", "postproc_noGSR"), - Path("data_postproc", "postproc_noScrub"), - Path("data_postproc", "normalized"), - Path("data_ROI_ts"), - Path('data_ROI_ts/postproc_default'), + data_postproc, + data_postproc / "betaseries_default", + data_postproc / "postproc_default", + data_ROI_ts, + data_ROI_ts / 'postproc_default', Path("l1_feat_folders"), Path("l1_fsfs"), Path("l2_fsfs"), Path("l2_gfeat_folders"), Path("logs"), - Path("logs", "betaseries_logs"), - Path("logs", "DCM2BIDS_logs"), - Path("logs", "glm_setup_logs"), - Path("logs", "postproc_logs"), - Path("logs", "ROI_extraction_logs"), - Path("logs", "intensity_normalization_logs"), - Path("logs", "SUSAN_logs"), + logs / "betaseries_logs", + logs / "bids_validation_logs", + logs / "DCM2BIDS_logs", + logs / "glm_setup_logs", + logs / "postproc_logs", + logs / "ROI_extraction_logs", + logs / "SUSAN_logs", + logs / "clpipe.log", Path("scripts"), Path("clpipe_config.json"), Path("conversion_config.json"), From 3cd2d1e991d617a8505353f86098c7afd994c2c1 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 17:55:34 -0500 Subject: [PATCH 28/82] Rename test_setup file. --- tests/setup/{test_setup_configuration.py => test_setup.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/setup/{test_setup_configuration.py => test_setup.py} (100%) diff --git a/tests/setup/test_setup_configuration.py b/tests/setup/test_setup.py similarity index 100% rename from tests/setup/test_setup_configuration.py rename to tests/setup/test_setup.py From ffbf3916476eca44a3db125ff0d7dbbf6545e45c Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 18:05:51 -0500 Subject: [PATCH 29/82] Adjust log message. 
---
 clpipe/project_setup.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py
index 8695a670..fb78ea4b 100644
--- a/clpipe/project_setup.py
+++ b/clpipe/project_setup.py
@@ -25,9 +25,8 @@ def project_setup(project_title=None, project_dir=None,
     org_source = os.path.abspath(source_data)
 
     if move_source_data or symlink_source_data:
-        source_data = os.path.join(os.path.abspath(project_dir),
-            DEFAULT_DICOM_DIR)
-        logger.debug(f"Created path for source directory at: {source_data}")
+        source_data = os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR)
+        logger.debug(f"Using source directory: {source_data}")
 
     logger.info(f"Starting project setup with title: {project_title}")
 
From 75ac2c1db4bde80f8681a350e7e38c154e5db9d9 Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Thu, 9 Feb 2023 18:07:12 -0500
Subject: [PATCH 30/82] Add explanation of os.chmod command.

---
 tests/setup/test_setup.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/tests/setup/test_setup.py b/tests/setup/test_setup.py
index ce678e6e..ea27f24f 100644
--- a/tests/setup/test_setup.py
+++ b/tests/setup/test_setup.py
@@ -1,8 +1,12 @@
 import pytest
 from pathlib import Path
 
+def test_setup():
+
+    pass
+
 
-def test_setup_project_missing(clpipe_dir, project_paths):
+def test_setup_missing(clpipe_dir, project_paths):
     """Check if any expected clpipe setup fails to create any expect folders or files."""
 
     missing = project_paths
@@ -14,7 +18,7 @@ def test_setup_project_missing(clpipe_dir, project_paths):
     assert len(missing) == 0, f"Missing expected paths: {missing}"
 
 
-def test_setup_project_extra(clpipe_dir, project_paths):
+def test_setup_extra(clpipe_dir, project_paths):
     """Check to see if clpipe setup creates any extra, unexpected folders or files."""
 
     extra = []
From cb948570abf738293766e88811e755783f62ff17 Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Thu, 9 Feb 2023 18:07:23 -0500
Subject: [PATCH 31/82] Add explanation of chmod command.

---
 clpipe/project_setup.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py
index fb78ea4b..e21cfcde 100644
--- a/clpipe/project_setup.py
+++ b/clpipe/project_setup.py
@@ -20,7 +20,9 @@ def project_setup(project_title=None, project_dir=None,
     org_source = os.path.abspath(source_data)
 
     add_file_handler(os.path.join(project_dir, "logs"))
-    os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IWGRP)
+    # Set permissions to clpipe.log file to allow for group write
+    os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"),
+        stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IWGRP)
     logger = get_logger(STEP_NAME, debug=debug)
 
     org_source = os.path.abspath(source_data)
From 6b94d45a027ac717b8f1da418861149b16cccc97 Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Thu, 9 Feb 2023 18:11:23 -0500
Subject: [PATCH 32/82] Simplify default_dicom_dir logic. Remove unused move_source_data check.
--- clpipe/project_setup.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index e21cfcde..f7cc4e68 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -26,9 +26,7 @@ def project_setup(project_title=None, project_dir=None, logger = get_logger(STEP_NAME, debug=debug) org_source = os.path.abspath(source_data) - if move_source_data or symlink_source_data: - source_data = os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR) - logger.debug(f"Using source directory: {source_data}") + default_dicom_dir = os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR) logger.info(f"Starting project setup with title: {project_title}") @@ -45,10 +43,10 @@ def project_setup(project_title=None, project_dir=None, conv_config_path = config['DICOMToBIDSOptions']['ConversionConfig'] if symlink_source_data: - logger.info('Creating SymLink for source data to project/data_DICOMs') + logger.info(f'Creating SymLink for source data to {default_dicom_dir}') os.symlink( os.path.abspath(org_source), - os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR) + default_dicom_dir ) # Create an empty BIDS directory From fa4d3981b0e7c2dc9b3b3a3c14624f53dae16ab5 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 18:14:06 -0500 Subject: [PATCH 33/82] Let the user know move_source_data isn't implemented. --- clpipe/project_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index f7cc4e68..2a1cc7a5 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -17,7 +17,6 @@ def project_setup(project_title=None, project_dir=None, symlink_source_data=None, debug=False): config_parser = ClpipeConfigParser() - org_source = os.path.abspath(source_data) add_file_handler(os.path.join(project_dir, "logs")) # Set permissions to clpipe.log file to allow for group write @@ -48,6 +47,8 @@ def project_setup(project_title=None, project_dir=None, os.path.abspath(org_source), default_dicom_dir ) + elif move_source_data: + raise NotImplementedError("Option -move_source_data is not yet implemented.") # Create an empty BIDS directory os.system(DCM2BIDS_SCAFFOLD_TEMPLATE.format(bids_dir)) From eb86674f2017ed54d1c92efac1ea996112bee490 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 18:32:54 -0500 Subject: [PATCH 34/82] Improve the logic for checking source path. 
--- clpipe/project_setup.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index 2a1cc7a5..62986771 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -2,7 +2,7 @@ from .config_json_parser import ClpipeConfigParser from pkg_resources import resource_stream import json -import logging +import sys from .config import DEFAULT_CONFIG_PATH, DEFAULT_CONFIG_FILE_NAME from .utils import get_logger, add_file_handler @@ -13,8 +13,8 @@ def project_setup(project_title=None, project_dir=None, - source_data=None, move_source_data=None, - symlink_source_data=None, debug=False): + source_data=None, move_source_data=False, + symlink_source_data=False, debug=False): config_parser = ClpipeConfigParser() @@ -24,9 +24,21 @@ def project_setup(project_title=None, project_dir=None, stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IWGRP) logger = get_logger(STEP_NAME, debug=debug) - org_source = os.path.abspath(source_data) default_dicom_dir = os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR) + if symlink_source_data and not source_data: + logger.error("A source data path is required when using a symlinked source.") + sys.exit(1) + elif move_source_data and not source_data: + logger.error("A source data path is required when moving source data.") + sys.exit(1) + elif source_data: + logger.info(f"Referencing source data: {source_data}") + source_data = os.path.abspath(source_data) + else: + logger.info(f"No source data specified.") + source_data = default_dicom_dir + logger.info(f"Starting project setup with title: {project_title}") config_parser.setup_project(project_title, project_dir, source_data) @@ -44,7 +56,7 @@ def project_setup(project_title=None, project_dir=None, if symlink_source_data: logger.info(f'Creating SymLink for source data to {default_dicom_dir}') os.symlink( - os.path.abspath(org_source), + source_data, default_dicom_dir ) elif move_source_data: From a878e533e2053483c7fd0d3c69a5667efbabe3c2 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 18:39:10 -0500 Subject: [PATCH 35/82] Check that the user didn't try both symlink_source_data and move_source_data. --- clpipe/project_setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index 62986771..cc1a4a47 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -26,6 +26,9 @@ def project_setup(project_title=None, project_dir=None, default_dicom_dir = os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR) + if symlink_source_data and move_source_data: + logger.error("Cannot choose to both move and symlink the source data.") + sys.exit(1) if symlink_source_data and not source_data: logger.error("A source data path is required when using a symlinked source.") sys.exit(1) From d92490ca251d719c8ecceb72bb0321d9b1aa9dda Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 18:39:29 -0500 Subject: [PATCH 36/82] Add tests for project_setup. 
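The tests added below are docstring-only stubs; a hedged sketch of how the
first case might eventually be filled in with pytest's tmp_path fixture
(import path and assertion assumed, not taken from the repo):

    from clpipe.project_setup import project_setup

    def test_setup_no_source(tmp_path):
        project_dir = tmp_path / "TestDir"
        project_setup(project_title="Test", project_dir=str(project_dir),
                      source_data=None, move_source_data=False,
                      symlink_source_data=False, debug=False)
        # With no source given, setup should fall back to creating an
        # empty data_DICOMs directory inside the new project.
        assert (project_dir / "data_DICOMs").is_dir()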
--- tests/setup/test_setup.py | 41 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/tests/setup/test_setup.py b/tests/setup/test_setup.py index ea27f24f..964ef7b8 100644 --- a/tests/setup/test_setup.py +++ b/tests/setup/test_setup.py @@ -1,8 +1,47 @@ import pytest from pathlib import Path -def test_setup(): +def test_setup_no_source(): + """Check that clpipe creates an empty data_DICOMs folder in the project + directory when no source is provided. + + project_setup(project_title="Test", project_dir="TestDir", + source_data=None, move_source_data=False, + symlink_source_data=False, debug=False) + """ + pass + +def test_setup_referenced_source(): + """Check that clpipe's generated config file references a specified source + directory that is not within the clpipe project directory. This variant + should not create a data_DICOMs directory. + + project_setup(project_title="Test", project_dir="TestDir", + source_data="TestSource", move_source_data=False, + symlink_source_data=False, debug=False) + """ + pass + +def test_setup_symlink_source(): + """Check that clpipe creates a data_DICOMs dir and symlinks it to the given + source data. + + project_setup(project_title="Test", project_dir="TestDir", + source_data="TestSource", move_source_data=False, + symlink_source_data=True, debug=False) + """ + pass + +def test_setup_move_source(): + """Note: this is currently NOT IMPLEMENTED in project setup. + + Check that clpipe creates a data_DICOMs dir and moves the data from a given + source directory to this directory. Data should not remain in the source directory. + project_setup(project_title="Test", project_dir="TestDir", + source_data="TestSource", move_source_data=True, + symlink_source_data=False, debug=False) + """ pass From 1517c64e558af138a3f6e4ac79ec07c509b755b5 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 19:31:04 -0500 Subject: [PATCH 37/82] Replace the os lib references with the more succinct pathlib.Path datatype. 
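
Path objects let the nested os.path.join calls collapse into the "/" operator and absorb the os.makedirs boilerplate. An illustrative before/after, not taken verbatim from the diff:

    import os
    from pathlib import Path

    project_dir = "example_project"

    # Before: nested joins and explicit makedirs.
    log_file = os.path.join(os.path.join(project_dir, "logs"), "clpipe.log")
    os.makedirs(os.path.join(project_dir, "analyses"), exist_ok=True)

    # After: paths compose with "/" and create their own directories.
    project_path = Path(project_dir).resolve()
    log_file = project_path / "logs" / "clpipe.log"
    (project_path / "analyses").mkdir(parents=True, exist_ok=True)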
--- clpipe/project_setup.py | 41 +++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/clpipe/project_setup.py b/clpipe/project_setup.py index cc1a4a47..29869299 100644 --- a/clpipe/project_setup.py +++ b/clpipe/project_setup.py @@ -3,6 +3,7 @@ from pkg_resources import resource_stream import json import sys +from pathlib import Path from .config import DEFAULT_CONFIG_PATH, DEFAULT_CONFIG_FILE_NAME from .utils import get_logger, add_file_handler @@ -18,13 +19,16 @@ def project_setup(project_title=None, project_dir=None, config_parser = ClpipeConfigParser() - add_file_handler(os.path.join(project_dir, "logs")) + project_dir = Path(project_dir).resolve() + logs_dir = project_dir / "logs" + + add_file_handler(logs_dir) # Set permissions to clpipe.log file to allow for group write - os.chmod(os.path.join(os.path.join(project_dir, "logs"), "clpipe.log"), + os.chmod(logs_dir / "clpipe.log", stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IWGRP) logger = get_logger(STEP_NAME, debug=debug) - default_dicom_dir = os.path.join(os.path.abspath(project_dir), DEFAULT_DICOM_DIR) + default_dicom_dir = project_dir / DEFAULT_DICOM_DIR if symlink_source_data and move_source_data: logger.error("Cannot choose to both move and symlink the source data.") @@ -37,23 +41,20 @@ def project_setup(project_title=None, project_dir=None, sys.exit(1) elif source_data: logger.info(f"Referencing source data: {source_data}") - source_data = os.path.abspath(source_data) + source_data = Path(source_data).resolve() else: - logger.info(f"No source data specified.") + logger.info(f"No source data specified. Defaulting to: {default_dicom_dir}") source_data = default_dicom_dir + source_data.mkdir(exist_ok=False) logger.info(f"Starting project setup with title: {project_title}") - config_parser.setup_project(project_title, project_dir, source_data) + logger.info(f"Creating new clpipe project in directory: {str(project_dir)}") + config_parser.setup_project(project_title, str(project_dir), source_data) config = config_parser.config - # Create the project directory - os.makedirs(project_dir, exist_ok=True) - logger.info(f"Created project directory at: {project_dir}") - bids_dir = config['DICOMToBIDSOptions']['BIDSDirectory'] - project_dir = config['ProjectDirectory'] conv_config_path = config['DICOMToBIDSOptions']['ConversionConfig'] if symlink_source_data: @@ -71,22 +72,22 @@ def project_setup(project_title=None, project_dir=None, logger.debug('Creating JSON config file') - config_parser.config_json_dump(project_dir, DEFAULT_CONFIG_FILE_NAME) + config_parser.config_json_dump(str(project_dir), DEFAULT_CONFIG_FILE_NAME) with resource_stream(__name__, DEFAULT_CONFIG_PATH) as def_conv_config: conv_config = json.load(def_conv_config) - logger.debug('JSON object loaded') + logger.debug('Default conversion config loaded') with open(conv_config_path, 'w') as fp: json.dump(conv_config, fp, indent='\t') - logger.debug('JSON indentation completed') + logger.debug(f'Created default conversion config file: {conv_config_path}') - os.makedirs(os.path.join(project_dir, 'analyses'), - exist_ok=True) - logger.debug('Created empty analyses directory') + analyses_dir = project_dir / 'analyses' + analyses_dir.mkdir(exist_ok=True) + logger.debug(f'Created empty analyses directory: {analyses_dir}') - os.makedirs(os.path.join(project_dir, 'scripts'), - exist_ok=True) - logger.debug('Created empty scripts directory') + script_dir = project_dir / 'scripts' + script_dir.mkdir(exist_ok=True) + 
logger.debug(f'Created empty scripts directory: {script_dir}') logger.info('Completed project setup') \ No newline at end of file From 97b98add4c3d5e5510feeafed6b4b4dab8c66124 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 19:42:51 -0500 Subject: [PATCH 38/82] Update test_setup. --- tests/setup/test_setup.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/tests/setup/test_setup.py b/tests/setup/test_setup.py index 964ef7b8..e3762683 100644 --- a/tests/setup/test_setup.py +++ b/tests/setup/test_setup.py @@ -1,17 +1,21 @@ import pytest from pathlib import Path -def test_setup_no_source(): +from clpipe.project_setup import project_setup + +PROJECT_TITLE = "test_project" + +def test_setup_no_source(project_dir): """Check that clpipe creates an empty data_DICOMs folder in the project directory when no source is provided. - - project_setup(project_title="Test", project_dir="TestDir", - source_data=None, move_source_data=False, - symlink_source_data=False, debug=False) """ - pass + + project_setup(project_title=PROJECT_TITLE, project_dir=project_dir) -def test_setup_referenced_source(): + assert Path(project_dir / "data_DICOMs").exists() + + +def test_setup_referenced_source(project_dir): """Check that clpipe's generated config file references a specified source directory that is not within the clpipe project directory. This variant should not create a data_DICOMs directory. @@ -20,9 +24,10 @@ def test_setup_referenced_source(): source_data="TestSource", move_source_data=False, symlink_source_data=False, debug=False) """ - pass + assert False -def test_setup_symlink_source(): + +def test_setup_symlink_source(project_dir): """Check that clpipe creates a data_DICOMs dir and symlinks it to the given source data. @@ -30,9 +35,11 @@ def test_setup_symlink_source(): source_data="TestSource", move_source_data=False, symlink_source_data=True, debug=False) """ - pass + assert False + -def test_setup_move_source(): +@pytest.mark.skip(reason="Feature Not implemented") +def test_setup_move_source(project_dir): """Note: this is currently NOT IMPLEMENTED in project setup. Check that clpipe creates a data_DICOMs dir and moves the data from a given From 84ed540675f261d17b61deee9012639f9893f42b Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 9 Feb 2023 19:43:36 -0500 Subject: [PATCH 39/82] Add a fixture just for project dir. 
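
The new project_dir fixture is function-scoped, so every setup test starts from a fresh, empty folder; the heavier clpipe_dir fixture stays module-scoped and shared. A rough sketch of the symlink stub using it (hypothetical assertion; the final test may check the link differently):

    from pathlib import Path

    from clpipe.project_setup import project_setup

    def test_setup_symlink_source(project_dir, tmp_path):
        source = tmp_path / "TestSource"
        source.mkdir()
        project_setup(project_title="test_project",
                      project_dir=str(project_dir),
                      source_data=str(source), symlink_source_data=True)
        # data_DICOMs should be a symlink pointing at the source directory.
        assert (project_dir / "data_DICOMs").is_symlink()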
--- tests/conftest.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 819f4967..1415581a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -85,19 +85,27 @@ def sample_raw_image_mask() -> Path: return Path("tests/data/sample_raw_mask.nii.gz").resolve() + +@pytest.fixture(scope="function") +def project_dir(tmp_path_factory): + """Fixture which provides a temporary space for a project folder.""" + proj_path = tmp_path_factory.mktemp(PROJECT_TITLE) + return Path(proj_path) + + @pytest.fixture(scope="module") def clpipe_dir(tmp_path_factory): """Fixture which provides a temporary clpipe project folder.""" - proj_path = tmp_path_factory.mktemp(PROJECT_TITLE) - - raw_data = Path(proj_path / "data_DICOMs") + project_dir = tmp_path_factory.mktemp(PROJECT_TITLE) + + raw_data = Path(project_dir / "data_DICOMs") raw_data.mkdir(parents=True, exist_ok=True) - project_setup(project_title=PROJECT_TITLE, project_dir=str(proj_path), + project_setup(project_title=PROJECT_TITLE, project_dir=str(project_dir), source_data=str(raw_data)) - return proj_path + return project_dir @pytest.fixture(scope="module") def clpipe_dicom_dir(clpipe_dir): From e51bea25f24bf5dadef04a9ad5064a9f5a5cace5 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 13 Feb 2023 12:30:06 -0500 Subject: [PATCH 40/82] As part of a move towards making clpipe completely config file driven, make the config file argument required for all commands. --- clpipe/cli.py | 34 ++++++++++++++++------------------ 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/clpipe/cli.py b/clpipe/cli.py index b487704c..ab3f1f69 100644 --- a/clpipe/cli.py +++ b/clpipe/cli.py @@ -171,7 +171,7 @@ def project_setup_cli(project_title=None, project_dir=None, source_data=None, @click.command(CONVERSION_COMMAND_NAME, no_args_is_help=True) @click.argument('subjects', nargs=-1, required=False, default=None) -@click.option('-config_file', '-c', type=CLICK_FILE_TYPE_EXISTS, default=None, +@click.option('-config_file', '-c', type=CLICK_FILE_TYPE_EXISTS, required=True, help=CONFIG_HELP) @click.option('-conv_config_file', type=CLICK_FILE_TYPE_EXISTS, default=None, help=CONVERSION_CONFIG_HELP) @@ -221,7 +221,7 @@ def convert2bids_cli(dicom_dir, dicom_dir_format, bids_dir, @click.command(VALIDATOR_COMMAND_NAME, no_args_is_help=True) @click.argument('bids_dir', type=CLICK_DIR_TYPE_EXISTS, required=False) -@click.option('-config_file', '-c', type=CLICK_FILE_TYPE_EXISTS, default=None, +@click.option('-config_file', '-c', type=CLICK_FILE_TYPE_EXISTS, required=True, help=CONFIG_HELP) @click.option('-log_dir', type=CLICK_FILE_TYPE_EXISTS, default=None, help=LOG_DIR_HELP) @@ -248,7 +248,7 @@ def bids_validate_cli(bids_dir, config_file, log_dir, interactive, submit, @click.command(FMRIPREP_COMMAND_NAME, no_args_is_help=True) @click.argument('subjects', nargs=-1, required=False, default=None) -@click.option('-config_file', '-c', default=None, type=CLICK_FILE_TYPE_EXISTS, +@click.option('-config_file', '-c', required=True, type=CLICK_FILE_TYPE_EXISTS, help=CONFIG_HELP) @click.option('-bids_dir', '-i', type=CLICK_DIR_TYPE_EXISTS, help=BIDS_DIR_HELP) @@ -295,7 +295,7 @@ def get_fmriprep_reports_cli(config_file, output_name, clear_temp, debug): @click.command(POSTPROCESS_COMMAND_NAME, no_args_is_help=True) @click.argument('subjects', nargs=-1, required=False, default=None) -@click.option('-config_file', '-c', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, help = 
'Use a given configuration file. If left blank, uses the default config file, requiring definition of BIDS, working and output directories.') +@click.option('-config_file', '-c', type=click.Path(exists=True, dir_okay=False, file_okay=True), required=True, help = 'Use a given configuration file. If left blank, uses the default config file, requiring definition of BIDS, working and output directories.') @click.option('-target_dir', '-i', type=click.Path(exists=True, dir_okay=True, file_okay=False), help='Which fmriprep directory to process. If a configuration file is provided with a BIDS directory, this argument is not necessary. Note, must point to the ``fmriprep`` directory, not its parent directory.') @click.option('-target_suffix', help= 'Which file suffix to use. If a configuration file is provided with a target suffix, this argument is not necessary. Defaults to "preproc_bold.nii.gz"') @click.option('-output_dir', '-o', type=click.Path(dir_okay=True, file_okay=False), help = 'Where to put the postprocessed data. If a configuration file is provided with a output directory, this argument is not necessary.') @@ -332,8 +332,7 @@ def fmri_postprocess_cli(config_file=None, subjects=None, target_dir=None, @click.command(POSTPROCESS2_COMMAND_NAME, no_args_is_help=True) @click.argument('subjects', nargs=-1, required=False, default=None) -@click.option('-config_file', '-c', type=CLICK_FILE_TYPE_EXISTS, default=None, - required=True, help=CONFIG_HELP) +@click.option('-config_file', '-c', type=CLICK_FILE_TYPE_EXISTS, required=True, help=CONFIG_HELP) @click.option('-fmriprep_dir', '-i', type=CLICK_DIR_TYPE_EXISTS, help=FMRIPREP_DIR_HELP) @click.option('-output_dir', '-o', type=CLICK_DIR_TYPE, default=None, required=False, @@ -371,11 +370,11 @@ def fmri_postprocess2_cli(subjects, config_file, fmriprep_dir, output_dir, @click.command(GLM_SETUP_COMMAND_NAME, no_args_is_help=True) @click.argument('subjects', nargs=-1, required=False, default=None) -@click.option('-config_file', '-c', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required = True, +@click.option('-config_file', '-c', type=click.Path(exists=True, dir_okay=False, file_okay=True), required=True, help='Use a given configuration file.') -@click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required = True, +@click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required=True, help='Use a given GLM configuration file.') -@click.option('-drop_tps', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required = False, +@click.option('-drop_tps', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required=False, help='Drop timepoints csv sheet') @click.option('-submit', '-s', is_flag=True, default=False, help='Flag to submit commands to the HPC.') @click.option('-batch/-single', default=True, @@ -402,8 +401,7 @@ def glm_setup_cli(subjects, config_file, glm_config_file, submit, batch, debug, @click.argument('level') @click.argument('model') @click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, - file_okay=True), default=None, required = True, - help=CONFIG_HELP) + file_okay=True), required=True, help=CONFIG_HELP) @click.option('-debug', '-d', is_flag=True, help=DEBUG_HELP) def glm_prepare_cli(level, model, glm_config_file, debug): @@ -419,7 +417,7 @@ def glm_prepare_cli(level, model, glm_config_file, debug): 
@click.command(L1_PREPARE_FSF_COMMAND_NAME, no_args_is_help=True) -@click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required = True, +@click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, file_okay=True), required=True, help='Your GLM configuration file.') @click.option('-l1_name', default=None, required = True, help='Name for a given L1 model as defined in your GLM configuration file.') @@ -488,7 +486,7 @@ def glm_apply_mumford_workaround_cli(glm_config_file, l1_feat_folders_path, @click.argument('level') @click.argument('model') @click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, - file_okay=True), default=None, required = True, + file_okay=True), required=True, help=CONFIG_HELP) @click.option('-test_one', is_flag=True, help=TEST_ONE_HELP) @@ -511,9 +509,9 @@ def glm_launch_cli(level, model, glm_config_file, test_one, submit, debug): @click.command(no_args_is_help=True) @click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, - file_okay=True), default=None, required = True, + file_okay=True), required=True, help=CONFIG_HELP) -@click.option('-l1_name', default=None, required = True, +@click.option('-l1_name', required=True, help=L1_MODEL_HELP) @click.option('-test_one', is_flag=True, help=TEST_ONE_HELP) @@ -530,9 +528,9 @@ def glm_l1_launch_cli(glm_config_file, l1_name, test_one, submit, debug): @click.command(no_args_is_help=True) @click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, - file_okay=True), default=None, required = True, + file_okay=True), required=True, help=CONFIG_HELP) -@click.option('-l2_name', default=None, required = True, +@click.option('-l2_name', required=True, help=L2_MODEL_HELP) @click.option('-test_one', is_flag=True, help=TEST_ONE_HELP) @@ -550,7 +548,7 @@ def glm_l2_launch_cli(glm_config_file, l2_name, test_one, submit, debug): @click.command(ONSET_EXTRACT_COMMAND_NAME, no_args_is_help=True) @click.option('-config_file', '-c', type=click.Path(exists=True, dir_okay=False, file_okay=True), - default=None, required = True, + required=True, help='Use a given configuration file.') @click.option('-glm_config_file', '-g', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None, required = True, From c705547e2b6b2079ca1b4859375b0d5aa9dd2216 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 13 Feb 2023 12:31:46 -0500 Subject: [PATCH 41/82] Fix reference to glm_apply_mumford_workaround. --- clpipe/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clpipe/cli.py b/clpipe/cli.py index ab3f1f69..d514a428 100644 --- a/clpipe/cli.py +++ b/clpipe/cli.py @@ -471,7 +471,7 @@ def glm_apply_mumford_workaround_cli(glm_config_file, l1_feat_folders_path, Must provide GLM config file OR a path to your L1 FEAT folders. """ - from .glm_l2 import glm_apply_mumford_workaround + from .glm_prepare import glm_apply_mumford_workaround if not (glm_config_file or l1_feat_folders_path): click.echo(("Error: At least one of either option '-glm_config_file' " "or '-l1_feat_folders_path' required.")) From acb634c017587a533a48d8ab407e684d1a02afeb Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 13 Feb 2023 12:55:10 -0500 Subject: [PATCH 42/82] Skip bids tests causing erroneous submissions to occur. 
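
A stopgap until the submission bug itself is fixed: a skipped test stays visible in the suite and reports its reason instead of running. A toy illustration of the marker (not clpipe code):

    import pytest

    @pytest.mark.skip(reason="Erroneously causing submission.")
    def test_example():
        # Never executes; pytest lists the skip reason in its summary.
        assert False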
--- tests/bids/test_conversion.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/bids/test_conversion.py b/tests/bids/test_conversion.py index 093069e7..1259904b 100644 --- a/tests/bids/test_conversion.py +++ b/tests/bids/test_conversion.py @@ -2,6 +2,7 @@ from clpipe.bids_conversion import convert2bids +@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -12,6 +13,7 @@ def test_dcm2bids(clpipe_dicom_dir, config_file): assert True +@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_sub_session(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -22,6 +24,7 @@ def test_dcm2bids_sub_session(clpipe_dicom_dir, config_file): assert True +@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_sub_session_flat(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -32,6 +35,7 @@ def test_dcm2bids_sub_session_flat(clpipe_dicom_dir, config_file): assert True +@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_session_sub(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -42,6 +46,7 @@ def test_dcm2bids_session_sub(clpipe_dicom_dir, config_file): assert True +@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_session_sub_flat(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -52,6 +57,7 @@ def test_dcm2bids_session_sub_flat(clpipe_dicom_dir, config_file): assert True +@pytest.mark.skip(reason="Erroneously causing submission.") def test_heudiconv(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = False, From 4199ea6aab30070cf24c66a91b545b3f4c205dcb Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 13 Feb 2023 12:55:35 -0500 Subject: [PATCH 43/82] Make sure flags are booleans in the convert2bids signature. --- clpipe/bids_conversion.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clpipe/bids_conversion.py b/clpipe/bids_conversion.py index 1f440959..8cdaa233 100644 --- a/clpipe/bids_conversion.py +++ b/clpipe/bids_conversion.py @@ -23,10 +23,10 @@ def convert2bids(dicom_dir=None, dicom_dir_format=None, bids_dir=None, - conv_config_file=None, config_file=None, overwrite=None, + conv_config_file=None, config_file=None, overwrite=False, clear_cache=False, clear_outputs=False, log_dir=None, subject=None, subjects=None, session=None, - longitudinal=False, status_cache=None, submit=None, debug=False, + longitudinal=False, status_cache=None, submit=False, debug=False, dcm2bids=True, batch=False): config_parser = ClpipeConfigParser() From e5527d25820dae7da015196973fd296c3030fda8 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 13 Feb 2023 13:05:29 -0500 Subject: [PATCH 44/82] Remove bids test skips. 
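
With the convert2bids flag defaults declared as real booleans in the previous patch, the conversion tests can run again. A toy contrast of the two signature styles (illustrative only; these patches do not state whether the None defaults themselves caused the stray submissions):

    # Before: ambiguous defaults; a reader can't tell flag from optional value.
    def convert_old(overwrite=None, submit=None):
        return bool(submit)

    # After: the flags read as flags and always carry a boolean.
    def convert_new(overwrite=False, submit=False):
        return submit

    assert convert_old() is False
    assert convert_new() is False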
--- tests/bids/test_conversion.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/bids/test_conversion.py b/tests/bids/test_conversion.py index 1259904b..093069e7 100644 --- a/tests/bids/test_conversion.py +++ b/tests/bids/test_conversion.py @@ -2,7 +2,6 @@ from clpipe.bids_conversion import convert2bids -@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -13,7 +12,6 @@ def test_dcm2bids(clpipe_dicom_dir, config_file): assert True -@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_sub_session(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -24,7 +22,6 @@ def test_dcm2bids_sub_session(clpipe_dicom_dir, config_file): assert True -@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_sub_session_flat(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -35,7 +32,6 @@ def test_dcm2bids_sub_session_flat(clpipe_dicom_dir, config_file): assert True -@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_session_sub(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -46,7 +42,6 @@ def test_dcm2bids_session_sub(clpipe_dicom_dir, config_file): assert True -@pytest.mark.skip(reason="Erroneously causing submission.") def test_dcm2bids_session_sub_flat(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = True, @@ -57,7 +52,6 @@ def test_dcm2bids_session_sub_flat(clpipe_dicom_dir, config_file): assert True -@pytest.mark.skip(reason="Erroneously causing submission.") def test_heudiconv(clpipe_dicom_dir, config_file): convert2bids( dcm2bids = False, From 80f3734a2b44f71dca76fa782e224255e03f706d Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 12:21:05 -0500 Subject: [PATCH 45/82] Rename workflows using the format ProcessingStep_algorithm_name, for consistency with the step names in the configuration and to label each step's algorithm. Eventually, it would be better to use constants here. --- clpipe/postprocutils/workflows.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/clpipe/postprocutils/workflows.py b/clpipe/postprocutils/workflows.py index 735d5ca6..8ea1a2d2 100644 --- a/clpipe/postprocutils/workflows.py +++ b/clpipe/postprocutils/workflows.py @@ -300,7 +300,7 @@ def build_100_voxel_mean_workflow(in_file: os.PathLike=None, out_file: os.PathLi div_math = BinaryMaths(operation='div') div_mean_node = pe.Node(div_math, name="div_mean") #operand_file=mean_path - workflow = pe.Workflow(name="100_Voxel_Mean", base_dir=base_dir) + workflow = pe.Workflow(name="IntensityNormalization_100_Voxel_Mean", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -324,7 +324,7 @@ def build_SUSAN_workflow(in_file: os.PathLike=None, mask_path: os.PathLike=None, pe.Workflow: A SUSAN smoothing workflow.
""" - workflow = pe.Workflow(name="SUSAN", base_dir=base_dir) + workflow = pe.Workflow(name="SpatialSmoothing_SUSAN", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -426,7 +426,7 @@ def _setup_usans_input(tmean_image, susan_threshold: float): def build_butterworth_filter_workflow(hp: float, lp: float, tr: float, order: float=None, in_file: os.PathLike=None, out_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="Butterworth", base_dir=base_dir) + workflow = pe.Workflow(name="TemporalFilter_Butterworth", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -452,7 +452,7 @@ def build_fslmath_temporal_filter(hp: float, lp: float, tr: float, order: float=None, in_file: os.PathLike=None, out_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="fslmaths_Temporal_Filter", base_dir=base_dir) + workflow = pe.Workflow(name="TemporalFilter_fslmaths", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -492,7 +492,7 @@ def build_confound_regression_fsl_glm_workflow(in_file: os.PathLike=None, out_fi base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): #TODO: This function currently returns an empty image - workflow = pe.Workflow(name="Confound_Regression", base_dir=base_dir) + workflow = pe.Workflow(name="ConfoundRegression_fsl_glm", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -528,7 +528,7 @@ def build_confound_regression_afni_3dTproject(in_file: os.PathLike=None, out_fil # Something specific to confound_regression's setup is not letting it work in postproc wf builder - workflow = pe.Workflow(name="Confound_Regression", base_dir=base_dir) + workflow = pe.Workflow(name="ConfoundRegression_3dTproject", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -560,7 +560,7 @@ def build_confound_regression_afni_3dTproject(in_file: os.PathLike=None, out_fil def build_aroma_workflow_fsl_regfilt(in_file: os.PathLike=None, out_file: os.PathLike=None, mixing_file: os.PathLike=None, noise_file: os.PathLike=None, mask_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="Apply_AROMA", base_dir=base_dir) + workflow = pe.Workflow(name="ApplyAROMA_fsl_regfilt", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -599,7 +599,7 @@ def build_aroma_workflow_fsl_regfilt_R(in_file: os.PathLike=None, out_file: os.P clpipe.postprocutils.r_setup.setup_clpipe_R_lib() fsl_regfilt_R_script_path = pkg_resources.resource_filename("clpipe", "data/R_scripts/fsl_regfilt.R") - workflow = pe.Workflow(name="Apply_AROMA_fsl_regfilt_R", base_dir=base_dir) + workflow = pe.Workflow(name="ApplyAROMA_fsl_regfilt_R", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -630,7 +630,7 @@ def build_apply_mask_workflow(in_file: os.PathLike=None, out_file: os.PathLike=None, mask_file:os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir:
os.PathLike=None): - workflow = pe.Workflow(name="Apply_Mask", base_dir=base_dir) + workflow = pe.Workflow(name="ApplyMask", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -655,7 +655,7 @@ def build_apply_mask_workflow(in_file: os.PathLike=None, def build_trim_timepoints_workflow(in_file: os.PathLike=None, out_file: os.PathLike=None, trim_from_beginning=None, trim_from_end=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="Trim_Timepoints", base_dir=base_dir) + workflow = pe.Workflow(name="TrimTimepoints", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir From 841fa0df40c216eb3b038431839e976e6df660cf Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 12:45:09 -0500 Subject: [PATCH 46/82] Go ahead and make the refactor to move step and implementation names to constants. Replace string literals with constants where necessary. Workflow names will now be consistent with the step/implementation names required in the configuration file. --- clpipe/postprocutils/workflows.py | 93 +++++++++++++++++++------------ 1 file changed, 57 insertions(+), 36 deletions(-) diff --git a/clpipe/postprocutils/workflows.py b/clpipe/postprocutils/workflows.py index 8ea1a2d2..7b63f3ca 100644 --- a/clpipe/postprocutils/workflows.py +++ b/clpipe/postprocutils/workflows.py @@ -23,9 +23,30 @@ from ..errors import ImplementationNotFoundError import clpipe.postprocutils.r_setup -RESCALING_10000_GLOBALMEDIAN = "globalmedian_10000" -RESCALING_100_VOXELMEAN = "voxelmean_100" -NORMALIZATION_METHODS = (RESCALING_10000_GLOBALMEDIAN, RESCALING_100_VOXELMEAN) +#TODO: Set these values up as hierarchical, maybe with enums + +STEP_TEMPORAL_FILTERING = "TemporalFiltering" +IMPLEMENTATION_BUTTERWORTH = "Butterworth" +IMPLEMENTATION_FSLMATHS = "fslmaths" + +STEP_INTENSITY_NORMALIZATION = "IntensityNormalization" +IMPLEMENTATION_10000_GLOBAL_MEDIAN = "10000_GlobalMedian" +IMPLEMENTATION_100_VOXEL_MEAN = "100_voxelmean" + +STEP_SPATIAL_SMOOTHING = "SpatialSmoothing" +IMPLEMENTATION_SUSAN = "SUSAN" + +STEP_AROMA_REGRESSION = "AROMARegression" +IMPLEMENTATION_FSL_REGFILT = "fsl_regfilt" +IMPLEMENTATION_FSL_REGFILT_R = "fsl_regfilt_R" + +STEP_CONFOUND_REGRESSION = "ConfoundRegression" +IMPLEMENTATION_FSL_GLM = "fsl_glm" +IMPLEMENTATION_AFNI_3DTPROJECT = "afni_3dTproject" + +STEP_APPLY_MASK = "ApplyMask" +STEP_TRIM_TIMEPOINTS = "TrimTimepoints" +STEP_RESAMPLE = "Resample" def build_postprocessing_workflow(image_wf: pe.Workflow=None, confounds_wf: pe.Workflow=None, name: str="Postprocessing_Pipeline", confound_regression: bool=False,
postprocessing_config["ProcessingStepOptions"][step]["Implementation"] intensity_normalization_implementation = _getIntensityNormalizationImplementation(implementation_name) current_wf = intensity_normalization_implementation(base_dir=postproc_wf.base_dir, mask_file=mask_file, crashdump_dir=crashdump_dir) - elif step == "SpatialSmoothing": + elif step == STEP_SPATIAL_SMOOTHING: fwhm_mm= postprocessing_config["ProcessingStepOptions"][step]["FWHM"] #brightness_threshold = postprocessing_config["ProcessingStepOptions"][step]["BrightnessThreshold"] implementation_name = postprocessing_config["ProcessingStepOptions"][step]["Implementation"] @@ -135,14 +156,14 @@ def build_image_postprocessing_workflow(postprocessing_config: dict, in_file: os current_wf = spatial_smoothing_implementation(base_dir=postproc_wf.base_dir, mask_path=mask_file, fwhm_mm=fwhm_mm, crashdump_dir=crashdump_dir) - elif step == "AROMARegression": + elif step == STEP_AROMA_REGRESSION: implementation_name = postprocessing_config["ProcessingStepOptions"][step]["Implementation"] apply_aroma_implementation = _getAROMARegressionImplementation(implementation_name) current_wf = apply_aroma_implementation(mixing_file=mixing_file, noise_file=noise_file, mask_file=mask_file, base_dir=postproc_wf.base_dir, crashdump_dir=crashdump_dir) - elif step == "ConfoundRegression": + elif step == STEP_CONFOUND_REGRESSION: implementation_name = postprocessing_config["ProcessingStepOptions"][step]["Implementation"] confound_regression_implementation = _getConfoundRegressionImplementation(implementation_name) @@ -151,17 +172,17 @@ def build_image_postprocessing_workflow(postprocessing_config: dict, in_file: os postproc_wf.connect(input_node, "confounds_file", current_wf, "inputnode.confounds_file") - elif step == "ApplyMask": + elif step == STEP_APPLY_MASK: current_wf = build_apply_mask_workflow(mask_file=mask_file, base_dir=postproc_wf.base_dir, crashdump_dir=crashdump_dir) - elif step == "TrimTimepoints": + elif step == STEP_TRIM_TIMEPOINTS: trim_from_beginning = postprocessing_config["ProcessingStepOptions"][step]["FromEnd"] trim_from_end = postprocessing_config["ProcessingStepOptions"][step]["FromBeginning"] current_wf = build_trim_timepoints_workflow(trim_from_beginning=trim_from_beginning, trim_from_end=trim_from_end, base_dir=postproc_wf.base_dir, crashdump_dir=crashdump_dir) - elif step == "Resample": + elif step == STEP_RESAMPLE: reference_image = postprocessing_config["ProcessingStepOptions"][step]["ReferenceImage"] if reference_image == "SET REFERENCE IMAGE": raise ValueError("No reference image provided. 
Please set a path to reference in clpipe_config.json") @@ -189,44 +210,44 @@ def build_image_postprocessing_workflow(postprocessing_config: dict, in_file: os def _getTemporalFilterImplementation(implementationName: str): - if implementationName == "Butterworth": + if implementationName == IMPLEMENTATION_BUTTERWORTH: return build_butterworth_filter_workflow - elif implementationName == "fslmaths": + elif implementationName == IMPLEMENTATION_FSLMATHS: return build_fslmath_temporal_filter else: - raise ImplementationNotFoundError(f"Temporal filtering implementation not found: {implementationName}") + raise ImplementationNotFoundError(f"{STEP_TEMPORAL_FILTERING} implementation not found: {implementationName}") def _getIntensityNormalizationImplementation(implementationName: str): - if implementationName == "10000_GlobalMedian": + if implementationName == IMPLEMENTATION_10000_GLOBAL_MEDIAN: return build_10000_global_median_workflow else: - raise ImplementationNotFoundError(f"Intensity normalization implementation not found: {implementationName}") + raise ImplementationNotFoundError(f"{STEP_INTENSITY_NORMALIZATION} implementation not found: {implementationName}") def _getSpatialSmoothingImplementation(implementationName: str): - if implementationName == "SUSAN": + if implementationName == IMPLEMENTATION_SUSAN: return build_SUSAN_workflow else: - raise ImplementationNotFoundError(f"Spatial smoothing implementation not found: {implementationName}") + raise ImplementationNotFoundError(f"{STEP_SPATIAL_SMOOTHING} implementation not found: {implementationName}") def _getAROMARegressionImplementation(implementationName: str): - if implementationName == "fsl_regfilt": + if implementationName == IMPLEMENTATION_FSL_REGFILT: return build_aroma_workflow_fsl_regfilt - if implementationName == "fsl_regfilt_R": + if implementationName == IMPLEMENTATION_FSL_REGFILT_R: return build_aroma_workflow_fsl_regfilt_R else: - raise ImplementationNotFoundError(f"AROMA regression implementation not found: {implementationName}") + raise ImplementationNotFoundError(f"{STEP_AROMA_REGRESSION} implementation not found: {implementationName}") def _getConfoundRegressionImplementation(implementationName: str): - if implementationName == "fsl_glm": + if implementationName == IMPLEMENTATION_FSL_GLM: return build_confound_regression_fsl_glm_workflow - elif implementationName == "afni_3dTproject": + elif implementationName == IMPLEMENTATION_AFNI_3DTPROJECT: return build_confound_regression_afni_3dTproject else: - raise ImplementationNotFoundError(f"Confound regression implementation not found: {implementationName}") + raise ImplementationNotFoundError(f"{STEP_CONFOUND_REGRESSION} implementation not found: {implementationName}") def build_10000_global_median_workflow(in_file: os.PathLike=None, out_file:os.PathLike=None, @@ -257,7 +278,7 @@ def build_10000_global_median_workflow(in_file: os.PathLike=None, out_file:os.Pa median_node.inputs.op_string = "-k %s -p 50" - workflow = pe.Workflow(name="10000_Global_Median", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_INTENSITY_NORMALIZATION}_{IMPLEMENTATION_10000_GLOBAL_MEDIAN}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -300,7 +321,7 @@ def build_100_voxel_mean_workflow(in_file: os.PathLike=None, out_file: os.PathLi div_math = BinaryMaths(operation='div') div_mean_node = pe.Node(div_math, name="div_mean") #operand_file=mean_path - workflow = pe.Workflow(name="IntensityNormalization_100_Voxel_Mean", 
base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_INTENSITY_NORMALIZATION}_{IMPLEMENTATION_100_VOXEL_MEAN}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -324,7 +324,7 @@ def build_SUSAN_workflow(in_file: os.PathLike=None, mask_path: os.PathLike=None, pe.Workflow: A SUSAN smoothing workflow. """ - workflow = pe.Workflow(name="SpatialSmoothing_SUSAN", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_SPATIAL_SMOOTHING}_{IMPLEMENTATION_SUSAN}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -426,7 +426,7 @@ def _setup_usans_input(tmean_image, susan_threshold: float): def build_butterworth_filter_workflow(hp: float, lp: float, tr: float, order: float=None, in_file: os.PathLike=None, out_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="TemporalFilter_Butterworth", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_TEMPORAL_FILTERING}_{IMPLEMENTATION_BUTTERWORTH}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -452,7 +452,7 @@ def build_fslmath_temporal_filter(hp: float, lp: float, tr: float, order: float=None, in_file: os.PathLike=None, out_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="TemporalFilter_fslmaths", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_TEMPORAL_FILTERING}_{IMPLEMENTATION_FSLMATHS}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -492,7 +492,7 @@ def build_confound_regression_fsl_glm_workflow(in_file: os.PathLike=None, out_fi base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): #TODO: This function currently returns an empty image - workflow = pe.Workflow(name="ConfoundRegression_fsl_glm", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_CONFOUND_REGRESSION}_{IMPLEMENTATION_FSL_GLM}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -528,7 +528,7 @@ def build_confound_regression_afni_3dTproject(in_file: os.PathLike=None, out_fil # Something specific to confound_regression's setup is not letting it work in postproc wf builder - workflow = pe.Workflow(name="ConfoundRegression_3dTproject", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_CONFOUND_REGRESSION}_{IMPLEMENTATION_AFNI_3DTPROJECT}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -560,7 +560,7 @@ def build_confound_regression_afni_3dTproject(in_file: os.PathLike=None, out_fil def build_aroma_workflow_fsl_regfilt(in_file: os.PathLike=None, out_file: os.PathLike=None, mixing_file: os.PathLike=None, noise_file: os.PathLike=None, mask_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="ApplyAROMA_fsl_regfilt", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_AROMA_REGRESSION}_{IMPLEMENTATION_FSL_REGFILT}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -599,7 +599,7 @@ def build_aroma_workflow_fsl_regfilt_R(in_file: os.PathLike=None, out_file: os.P clpipe.postprocutils.r_setup.setup_clpipe_R_lib()
fsl_regfilt_R_script_path = pkg_resources.resource_filename("clpipe", "data/R_scripts/fsl_regfilt.R") - workflow = pe.Workflow(name="ApplyAROMA_fsl_regfilt_R", base_dir=base_dir) + workflow = pe.Workflow(name=f"{STEP_AROMA_REGRESSION}_{IMPLEMENTATION_FSL_REGFILT_R}", base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -630,7 +651,7 @@ def build_aroma_workflow_fsl_regfilt_R(in_file: os.PathLike=None, out_file: os.P def build_apply_mask_workflow(in_file: os.PathLike=None, out_file: os.PathLike=None, mask_file:os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="ApplyMask", base_dir=base_dir) + workflow = pe.Workflow(name=STEP_APPLY_MASK, base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -655,7 +676,7 @@ def build_apply_mask_workflow(in_file: os.PathLike=None, def build_trim_timepoints_workflow(in_file: os.PathLike=None, out_file: os.PathLike=None, trim_from_beginning=None, trim_from_end=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="TrimTimepoints", base_dir=base_dir) + workflow = pe.Workflow(name=STEP_TRIM_TIMEPOINTS, base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir @@ -681,7 +702,7 @@ def build_trim_timepoints_workflow(in_file: os.PathLike=None, def build_resample_workflow(reference_image:os.PathLike=None, in_file: os.PathLike=None, out_file: os.PathLike=None, base_dir: os.PathLike=None, crashdump_dir: os.PathLike=None): - workflow = pe.Workflow(name="Resample", base_dir=base_dir) + workflow = pe.Workflow(name=STEP_RESAMPLE, base_dir=base_dir) if crashdump_dir is not None: workflow.config['execution']['crashdump_dir'] = crashdump_dir From b10898ff4e2057153236018376f6082fc55839f2 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:04:34 -0500 Subject: [PATCH 47/82] Fix all references in postprocess2 tests. 
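
The tests now call the function-based entry points (postprocess_subject, postprocess_subjects) in place of the job classes they previously drove directly. The shape of the change, abridged from the hunks below:

    # Before: build a job object, then invoke it.
    job = PostProcessSubjectJob('1', bids_dir, fmriprep_dir, postproc_dir,
                                config_file, pybids_db_path=pybids_db_path,
                                log_dir=log_dir)
    job()

    # After: one direct call with the same arguments.
    postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file,
                        pybids_db_path=pybids_db_path, log_dir=log_dir)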
--- tests/postprocess2/test_postprocess2.py | 65 ++++++++++++++----------- 1 file changed, 36 insertions(+), 29 deletions(-) diff --git a/tests/postprocess2/test_postprocess2.py b/tests/postprocess2/test_postprocess2.py index 8a92f073..c2cf0bbf 100644 --- a/tests/postprocess2/test_postprocess2.py +++ b/tests/postprocess2/test_postprocess2.py @@ -5,8 +5,8 @@ from click.testing import CliRunner from clpipe.postprocutils.workflows import * -from clpipe.fmri_postprocess2 import PostProcessSubjectJobs, PostProcessSubjectJob, postprocess_fmriprep_dir, fmri_postprocess2_cli - +from clpipe.fmri_postprocess2 import * +from clpipe.cli import fmri_postprocess2_cli def test_postprocess_cli_debug(clpipe_fmriprep_dir, artifact_dir, helpers, request): """Note: this test always passes because click does its own exit code handling - but this lets one trace through the cli with a debugger""" @@ -27,6 +27,7 @@ def test_postprocess_cli_debug(clpipe_fmriprep_dir, artifact_dir, helpers, reque '-log_dir', str(log_dir), '-no-batch', '-debug']) + def test_postprocess_cli(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" config = clpipe_fmriprep_dir / "clpipe_config.json" @@ -38,7 +39,7 @@ def test_postprocess_cli(clpipe_fmriprep_dir, artifact_dir, helpers, request): runner = CliRunner() result = runner.invoke( - hngprep_cli, + fmri_postprocess2_cli, ['-config_file', str(config), '-target_dir', str(fmriprep_dir), '-output_dir', str(postproc_dir), @@ -49,7 +50,8 @@ def test_postprocess_cli(clpipe_fmriprep_dir, artifact_dir, helpers, request): traceback.print_exc(result.exception) assert result.stderr == 0 -def test_postprocess_fmriprep_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): + +def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" config = clpipe_fmriprep_dir / "clpipe_config.json" glm_config = clpipe_fmriprep_dir / "glm_config.json" @@ -58,16 +60,18 @@ def test_postprocess_fmriprep_dir(clpipe_fmriprep_dir, artifact_dir, helpers, re log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - postprocess_fmriprep_dir(config_file=config, glm_config_file=glm_config, fmriprep_dir=fmriprep_dir, + postprocess_subjects(config_file=config, glm_config_file=glm_config, fmriprep_dir=fmriprep_dir, output_dir=postproc_dir, log_dir=log_dir) -def test_postprocess_fmriprep_dir_config_only(clpipe_fmriprep_dir, artifact_dir, helpers, request): + +def test_postprocess_subjects_dir_config_only(clpipe_fmriprep_dir): config = clpipe_fmriprep_dir / "clpipe_config.json" with pytest.raises(SystemExit): - postprocess_fmriprep_dir(config_file=config, submit=True, batch=False) + postprocess_subjects(config_file=config, submit=True, batch=False) + -def test_postprocess_fmriprep_dir_invalid_subject(clpipe_fmriprep_dir, artifact_dir, helpers, request): +def test_postprocess_subjects_dir_invalid_subject(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" config = clpipe_fmriprep_dir / "clpipe_config.json" glm_config = clpipe_fmriprep_dir / "glm_config.json" @@ -76,9 +80,10 @@ def test_postprocess_fmriprep_dir_invalid_subject(clpipe_fmriprep_dir, artifact_ log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - postprocess_fmriprep_dir(subjects=['99'], config_file=config, glm_config_file=glm_config, 
fmriprep_dir=fmriprep_dir, + postprocess_subjects(subjects=['99'], config_file=config, glm_config_file=glm_config, fmriprep_dir=fmriprep_dir, output_dir=postproc_dir, log_dir=log_dir) + def test_postprocess2_wf_2_steps(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_confounds_timeseries, plot_img, write_graph, helpers): @@ -101,6 +106,7 @@ def test_postprocess2_wf_2_steps(artifact_dir, postprocessing_config, request, s assert True + def test_postprocess2_wf_3_steps(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_confounds_timeseries, plot_img, write_graph, helpers): @@ -123,6 +129,7 @@ def test_postprocess2_wf_3_steps(artifact_dir, postprocessing_config, request, s assert True + def test_postprocess2_wf_1_step(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_confounds_timeseries, plot_img, write_graph, helpers): @@ -145,6 +152,7 @@ def test_postprocess2_wf_1_step(artifact_dir, postprocessing_config, request, sa assert True + # This test won't work until properly processed confound file provided def test_postprocess2_wf_confound_regression_last(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_confounds_timeseries, plot_img, write_graph, helpers): @@ -168,6 +176,7 @@ def test_postprocess2_wf_confound_regression_last(artifact_dir, postprocessing_c assert True + def test_postprocess2_wf_confound_regression_first(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_confounds_timeseries, plot_img, write_graph, helpers): @@ -190,6 +199,7 @@ def test_postprocess2_wf_confound_regression_first(artifact_dir, postprocessing_ assert True + def test_postprocess2_wf_aroma(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_melodic_mixing, sample_aroma_noise_ics, plot_img, write_graph, helpers): @@ -212,6 +222,7 @@ def test_postprocess2_wf_aroma(artifact_dir, postprocessing_config, request, sam assert True + def test_postprocess2_wf_aroma_last(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_melodic_mixing, sample_aroma_noise_ics, plot_img, write_graph, helpers): @@ -234,6 +245,7 @@ def test_postprocess2_wf_aroma_last(artifact_dir, postprocessing_config, request assert True + def test_postprocess2_wf_no_mask(artifact_dir, postprocessing_config, request, sample_raw_image, plot_img, write_graph, helpers): test_path = helpers.create_test_dir(artifact_dir, request.node.name) @@ -252,6 +264,7 @@ def test_postprocess2_wf_no_mask(artifact_dir, postprocessing_config, request, s assert True + def test_postprocess_subjects_job(clpipe_fmriprep_dir, artifact_dir, helpers, request): config = clpipe_fmriprep_dir / "clpipe_config.json" bids_dir = clpipe_fmriprep_dir / "data_BIDS" @@ -264,23 +277,22 @@ def test_postprocess_subjects_job(clpipe_fmriprep_dir, artifact_dir, helpers, re log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - jobs = PostProcessSubjectJobs(bids_dir, fmriprep_dir, postproc_dir, config, + postprocess_subjects(bids_dir, fmriprep_dir, postproc_dir, config, log_dir=log_dir, pybids_db_path=pybids_db_path) - jobs.run() - assert len(jobs.post_process_jobs) == 8 def test_prepare_confounds(sample_confounds_timeseries, postprocessing_config, artifact_dir, helpers, request): test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = 
test_path / "new_confounds.tsv" - cf_workflow = build_confound_postprocessing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, + cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, out_file=out_path, base_dir=test_path, crashdump_dir=test_path, tr=2) cf_workflow.run() assert True + def test_prepare_confounds_aroma(sample_confounds_timeseries, postprocessing_config, sample_melodic_mixing, sample_aroma_noise_ics, artifact_dir, helpers, request): @@ -289,7 +301,7 @@ def test_prepare_confounds_aroma(sample_confounds_timeseries, postprocessing_con postprocessing_config["ProcessingSteps"] = ["AROMARegression", "TemporalFiltering", "IntensityNormalization"] - cf_workflow = build_confound_postprocessing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, + cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, out_file=out_path, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, base_dir=test_path, crashdump_dir=test_path, tr=2) @@ -297,6 +309,7 @@ def test_prepare_confounds_aroma(sample_confounds_timeseries, postprocessing_con assert True + def test_postprocess_subject_job_setup(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" bids_dir = clpipe_fmriprep_dir / "data_BIDS" @@ -308,12 +321,8 @@ def test_postprocess_subject_job_setup(clpipe_fmriprep_dir, artifact_dir, helper log_dir.mkdir(parents=True, exist_ok=True) pybids_db_path = test_dir / "BIDS_index" - subject = PostProcessSubjectJob('1', bids_dir, fmriprep_dir, postproc_dir, config, pybids_db_path=pybids_db_path, log_dir=log_dir) - - subject.setup() + postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config, pybids_db_path=pybids_db_path, log_dir=log_dir) - # Should be a task/run for each of rest, task1, task2-run1, task2-run2 - assert len(subject.image_jobs) == 4 def test_postprocess_subject_job(clpipe_fmriprep_dir, config_file, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" @@ -325,9 +334,8 @@ def test_postprocess_subject_job(clpipe_fmriprep_dir, config_file, artifact_dir, log_dir.mkdir(parents=True, exist_ok=True) pybids_db_path = test_dir / "BIDS_index" - subject = PostProcessSubjectJob('1', bids_dir, fmriprep_dir, postproc_dir, config_file, pybids_db_path=pybids_db_path, log_dir=log_dir) + postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file, pybids_db_path=pybids_db_path, log_dir=log_dir) - subject() def test_postprocess_subject_with_confounds(clpipe_fmriprep_dir, config_file_confounds, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" @@ -339,8 +347,8 @@ def test_postprocess_subject_with_confounds(clpipe_fmriprep_dir, config_file_con log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - subject = PostProcessSubjectJob('1', bids_dir, fmriprep_dir, postproc_dir, config_file_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) - subject() + postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) + def test_postprocess_subject_aroma(clpipe_fmriprep_dir, config_file_aroma, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" @@ -352,8 +360,8 @@ def 
test_postprocess_subject_aroma(clpipe_fmriprep_dir, config_file_aroma, artif log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - subject = PostProcessSubjectJob('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma, pybids_db_path=pybids_db_path, log_dir=log_dir) - subject() + postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma, pybids_db_path=pybids_db_path, log_dir=log_dir) + def test_postprocess_subject_aroma_with_confound_processing(clpipe_fmriprep_dir, config_file_aroma_confounds, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" @@ -365,8 +373,8 @@ def test_postprocess_subject_aroma_with_confound_processing(clpipe_fmriprep_dir, log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - subject = PostProcessSubjectJob('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) - subject() + postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) + def test_postprocess2_wf_fslmaths_temporal_filter(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, sample_confounds_timeseries, plot_img, write_graph, helpers): @@ -391,6 +399,7 @@ def test_postprocess2_wf_fslmaths_temporal_filter(artifact_dir, postprocessing_c assert True + def test_postprocess2_wf_resample(artifact_dir, postprocessing_config, request, sample_raw_image, sample_reference, sample_raw_image_mask, plot_img, write_graph, helpers): postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering", "Resample"] @@ -411,5 +420,3 @@ def test_postprocess2_wf_resample(artifact_dir, postprocessing_config, request, helpers.plot_4D_img_slice(out_path, "postProcessed.png") assert True - - From 9ab823a9333cc8a8ebadd9b675dbe954d454f6d7 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:16:42 -0500 Subject: [PATCH 48/82] Move wf chain tests to their own module. 
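
This leaves test_postprocess2.py with the subject- and job-level tests and collects the build_postprocessing_workflow step-chain tests in tests/postprocess2/test_wf_chains.py. The moved tests all share one shape, abridged here with the fixture names used in the removed block:

    def test_postprocess2_wf_2_steps(artifact_dir, postprocessing_config,
                                     request, sample_raw_image,
                                     sample_raw_image_mask,
                                     sample_confounds_timeseries, helpers):
        postprocessing_config["ProcessingSteps"] = [
            "SpatialSmoothing", "IntensityNormalization"]
        test_path = helpers.create_test_dir(artifact_dir, request.node.name)
        wf = build_postprocessing_workflow(
            postprocessing_config, in_file=sample_raw_image,
            out_file=test_path / "postProcessed.nii.gz", tr=2,
            mask_file=sample_raw_image_mask,
            confound_file=sample_confounds_timeseries,
            base_dir=test_path, crashdump_dir=test_path)
        wf.run()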
--- tests/postprocess2/test_postprocess2.py | 275 +----------------------- tests/postprocess2/test_wf_chains.py | 229 ++++++++++++++++++++ 2 files changed, 230 insertions(+), 274 deletions(-) create mode 100644 tests/postprocess2/test_wf_chains.py diff --git a/tests/postprocess2/test_postprocess2.py b/tests/postprocess2/test_postprocess2.py index c2cf0bbf..3194a6e2 100644 --- a/tests/postprocess2/test_postprocess2.py +++ b/tests/postprocess2/test_postprocess2.py @@ -1,54 +1,8 @@ import pytest -from pathlib import Path -import traceback - -from click.testing import CliRunner from clpipe.postprocutils.workflows import * from clpipe.fmri_postprocess2 import * -from clpipe.cli import fmri_postprocess2_cli - -def test_postprocess_cli_debug(clpipe_fmriprep_dir, artifact_dir, helpers, request): - """Note: this test always passes because click does its own exit code handling - but this lets one trace through the cli with a debugger""" - - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - config = clpipe_fmriprep_dir / "clpipe_config.json" - glm_config = clpipe_fmriprep_dir / "glm_config.json" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - with pytest.raises(SystemExit): - fmri_postprocess2_cli(['-config_file', str(config), - '-target_dir', str(fmriprep_dir), - '-output_dir', str(postproc_dir), - '-glm_config_file', str(glm_config), - '-log_dir', str(log_dir), - '-no-batch', '-debug']) - - -def test_postprocess_cli(clpipe_fmriprep_dir, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - config = clpipe_fmriprep_dir / "clpipe_config.json" - glm_config = clpipe_fmriprep_dir / "glm_config.json" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - runner = CliRunner() - result = runner.invoke( - fmri_postprocess2_cli, - ['-config_file', str(config), - '-target_dir', str(fmriprep_dir), - '-output_dir', str(postproc_dir), - '-glm_config_file', str(glm_config), - '-log_dir', str(log_dir), - 'dsfdsf', '-debug'] - ) - traceback.print_exc(result.exception) - assert result.stderr == 0 +from pathlib import Path def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): @@ -84,187 +38,6 @@ def test_postprocess_subjects_dir_invalid_subject(clpipe_fmriprep_dir, artifact_ output_dir=postproc_dir, log_dir=log_dir) -def test_postprocess2_wf_2_steps(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def 
test_postprocess2_wf_3_steps(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def test_postprocess2_wf_1_step(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -# This test won't work until properly processed confound file provided -def test_postprocess2_wf_confound_regression_last(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["TemporalFiltering", "ConfoundRegression"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def test_postprocess2_wf_confound_regression_first(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["ConfoundRegression", "SpatialSmoothing"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def 
test_postprocess2_wf_aroma(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_melodic_mixing, sample_aroma_noise_ics, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["AROMARegression", "SpatialSmoothing", "IntensityNormalization"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def test_postprocess2_wf_aroma_last(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_melodic_mixing, sample_aroma_noise_ics, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["TemporalFiltering", "SpatialSmoothing", "IntensityNormalization", "AROMARegression"] - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def test_postprocess2_wf_no_mask(artifact_dir, postprocessing_config, request, sample_raw_image, - plot_img, write_graph, helpers): - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, sample_raw_image, out_path, 2, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - def test_postprocess_subjects_job(clpipe_fmriprep_dir, artifact_dir, helpers, request): config = clpipe_fmriprep_dir / "clpipe_config.json" bids_dir = clpipe_fmriprep_dir / "data_BIDS" @@ -374,49 +147,3 @@ def test_postprocess_subject_aroma_with_confound_processing(clpipe_fmriprep_dir, log_dir.mkdir(parents=True, exist_ok=True) postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) - - -def test_postprocess2_wf_fslmaths_temporal_filter(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering"] - postprocessing_config["ProcessingStepOptions"]["TemporalFiltering"]["Algorithm"] = "fslmaths" - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, 
in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True - - -def test_postprocess2_wf_resample(artifact_dir, postprocessing_config, request, sample_raw_image, sample_reference, sample_raw_image_mask, plot_img, write_graph, helpers): - - postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering", "Resample"] - postprocessing_config["ProcessingStepOptions"]["Resample"]["ReferenceImage"] = str(sample_reference) - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "postProcessed.nii.gz" - - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - base_dir=test_path, crashdump_dir=test_path) - - wf.run() - - if write_graph: - wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) - - if plot_img: - helpers.plot_4D_img_slice(out_path, "postProcessed.png") - - assert True diff --git a/tests/postprocess2/test_wf_chains.py b/tests/postprocess2/test_wf_chains.py new file mode 100644 index 00000000..f3d3d8a8 --- /dev/null +++ b/tests/postprocess2/test_wf_chains.py @@ -0,0 +1,229 @@ +import pytest + +from clpipe.fmri_postprocess2 import * + +def test_postprocess2_wf_2_steps(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_confounds_timeseries, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + confound_file=sample_confounds_timeseries, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_3_steps(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_confounds_timeseries, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + confound_file=sample_confounds_timeseries, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_1_step(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_confounds_timeseries, plot_img, write_graph, helpers): + + 
postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + confound_file=sample_confounds_timeseries, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +# This test won't work until properly processed confound file provided +def test_postprocess2_wf_confound_regression_last(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_confounds_timeseries, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["TemporalFiltering", "ConfoundRegression"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + confound_file=sample_confounds_timeseries, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_confound_regression_first(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_confounds_timeseries, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["ConfoundRegression", "SpatialSmoothing"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + confound_file=sample_confounds_timeseries, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_aroma(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_melodic_mixing, sample_aroma_noise_ics, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["AROMARegression", "SpatialSmoothing", "IntensityNormalization"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_aroma_last(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_melodic_mixing, 
sample_aroma_noise_ics, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["TemporalFiltering", "SpatialSmoothing", "IntensityNormalization", "AROMARegression"] + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_no_mask(artifact_dir, postprocessing_config, request, sample_raw_image, + plot_img, write_graph, helpers): + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, sample_raw_image, out_path, 2, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_fslmaths_temporal_filter(artifact_dir, postprocessing_config, request, sample_raw_image, sample_raw_image_mask, + sample_confounds_timeseries, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering"] + postprocessing_config["ProcessingStepOptions"]["TemporalFiltering"]["Algorithm"] = "fslmaths" + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + confound_file=sample_confounds_timeseries, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True + + +def test_postprocess2_wf_resample(artifact_dir, postprocessing_config, request, sample_raw_image, sample_reference, sample_raw_image_mask, plot_img, write_graph, helpers): + + postprocessing_config["ProcessingSteps"] = ["SpatialSmoothing", "IntensityNormalization", "TemporalFiltering", "Resample"] + postprocessing_config["ProcessingStepOptions"]["Resample"]["ReferenceImage"] = str(sample_reference) + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "postProcessed.nii.gz" + + wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + base_dir=test_path, crashdump_dir=test_path) + + wf.run() + + if write_graph: + wf.write_graph(dotfilename = test_path / "postProcessSubjectFlow", graph2use=write_graph) + + if plot_img: + helpers.plot_4D_img_slice(out_path, "postProcessed.png") + + assert True From d91c7b17216497378e7fdeabe2989e56966c7afb Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:17:21 -0500 Subject: [PATCH 49/82] Create a module specifically for postprocess2's cli. 
Use the cli folder for future cli tests. --- tests/cli/test_postprocess2.py | 49 ++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 tests/cli/test_postprocess2.py diff --git a/tests/cli/test_postprocess2.py b/tests/cli/test_postprocess2.py new file mode 100644 index 00000000..6e444297 --- /dev/null +++ b/tests/cli/test_postprocess2.py @@ -0,0 +1,49 @@ +import pytest +import traceback + +from clpipe.cli import * +from pathlib import Path +from click.testing import CliRunner + + +def test_postprocess_cli_debug(clpipe_fmriprep_dir, artifact_dir, helpers, request): + """Note: this test always passes because click does its own exit code handling - but this lets one trace through the cli with a debugger""" + + fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" + config = clpipe_fmriprep_dir / "clpipe_config.json" + glm_config = clpipe_fmriprep_dir / "glm_config.json" + test_dir = helpers.create_test_dir(artifact_dir, request.node.name) + postproc_dir = Path(test_dir / "data_postprocessed") + log_dir = Path(test_dir / "logs" / "postproc_logs") + log_dir.mkdir(parents=True, exist_ok=True) + + with pytest.raises(SystemExit): + fmri_postprocess2_cli(['-config_file', str(config), + '-target_dir', str(fmriprep_dir), + '-output_dir', str(postproc_dir), + '-glm_config_file', str(glm_config), + '-log_dir', str(log_dir), + '-no-batch', '-debug']) + + +def test_postprocess_cli(clpipe_fmriprep_dir, artifact_dir, helpers, request): + fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" + config = clpipe_fmriprep_dir / "clpipe_config.json" + glm_config = clpipe_fmriprep_dir / "glm_config.json" + test_dir = helpers.create_test_dir(artifact_dir, request.node.name) + postproc_dir = Path(test_dir / "data_postprocessed") + log_dir = Path(test_dir / "logs" / "postproc_logs") + log_dir.mkdir(parents=True, exist_ok=True) + + runner = CliRunner() + result = runner.invoke( + fmri_postprocess2_cli, + ['-config_file', str(config), + '-target_dir', str(fmriprep_dir), + '-output_dir', str(postproc_dir), + '-glm_config_file', str(glm_config), + '-log_dir', str(log_dir), + 'dsfdsf', '-debug'] + ) + traceback.print_exc(result.exception) + assert result.stderr == 0 \ No newline at end of file From 57faa4d29a7935c47eb94a8b0ef40c6581bbf347 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:18:24 -0500 Subject: [PATCH 50/82] Rename postprocess2 modules to remove redundancy from name. 
--- .../{test_postprocess2_func.py => test_func.py} | 0 .../{test_postprocess2_wfs.py => test_wfs.py} | 9 +-------- 2 files changed, 1 insertion(+), 8 deletions(-) rename tests/postprocess2/{test_postprocess2_func.py => test_func.py} (100%) rename tests/postprocess2/{test_postprocess2_wfs.py => test_wfs.py} (96%) diff --git a/tests/postprocess2/test_postprocess2_func.py b/tests/postprocess2/test_func.py similarity index 100% rename from tests/postprocess2/test_postprocess2_func.py rename to tests/postprocess2/test_func.py diff --git a/tests/postprocess2/test_postprocess2_wfs.py b/tests/postprocess2/test_wfs.py similarity index 96% rename from tests/postprocess2/test_postprocess2_wfs.py rename to tests/postprocess2/test_wfs.py index 46906265..008a4145 100644 --- a/tests/postprocess2/test_postprocess2_wfs.py +++ b/tests/postprocess2/test_wfs.py @@ -1,19 +1,12 @@ import pytest -import nipype.pipeline.engine as pe -import nibabel as nib -from nilearn import plotting -from nilearn.image import load_img, index_img -import os -from pathlib import Path - from clpipe.postprocutils.workflows import * def test_spatial_smoothing_wf(artifact_dir, request, sample_raw_image, sample_raw_image_mask, plot_img, write_graph, helpers): test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "smoothed.nii.gz" - wf = build_spatial_smoothing_workflow(in_file=sample_raw_image, out_file=out_path, fwhm_mm=6, + wf = build_SUSAN_workflow(in_file=sample_raw_image, out_file=out_path, fwhm_mm=6, mask_path=sample_raw_image_mask, base_dir=test_path, crashdump_dir=test_path) wf.run() From cddb7e6575b2c32533dd190b0cbc71ff96f65ec2 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:18:50 -0500 Subject: [PATCH 51/82] Move dcm2bids and setup tests back up to main test folder. --- tests/{bids/test_conversion.py => test_dcm2bids.py} | 0 tests/{setup => }/test_setup.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename tests/{bids/test_conversion.py => test_dcm2bids.py} (100%) rename tests/{setup => }/test_setup.py (100%) diff --git a/tests/bids/test_conversion.py b/tests/test_dcm2bids.py similarity index 100% rename from tests/bids/test_conversion.py rename to tests/test_dcm2bids.py diff --git a/tests/setup/test_setup.py b/tests/test_setup.py similarity index 100% rename from tests/setup/test_setup.py rename to tests/test_setup.py From c9e75ffdb0aa40160c36971877a120b02aa04a39 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:25:02 -0500 Subject: [PATCH 52/82] Remove postprocess2 test module and replace with test_controllers.py. Merge test_func.py into this module. 
--- ...est_postprocess2.py => test_controllers.py} | 15 +++++++++++++++ tests/postprocess2/test_func.py | 18 ------------------ 2 files changed, 15 insertions(+), 18 deletions(-) rename tests/postprocess2/{test_postprocess2.py => test_controllers.py} (91%) delete mode 100644 tests/postprocess2/test_func.py diff --git a/tests/postprocess2/test_postprocess2.py b/tests/postprocess2/test_controllers.py similarity index 91% rename from tests/postprocess2/test_postprocess2.py rename to tests/postprocess2/test_controllers.py index 3194a6e2..7a85c82c 100644 --- a/tests/postprocess2/test_postprocess2.py +++ b/tests/postprocess2/test_controllers.py @@ -5,6 +5,21 @@ from pathlib import Path +def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): + fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" + config = clpipe_fmriprep_dir / "clpipe_config.json" + bids_dir = clpipe_fmriprep_dir / "data_BIDS" + test_dir = helpers.create_test_dir(artifact_dir, request.node.name) + postproc_dir = Path(test_dir / "data_postprocessed") + log_dir = Path(test_dir / "logs" / "postproc_logs") + log_dir.mkdir(parents=True, exist_ok=True) + + + with pytest.raises(SystemExit): + postprocess_subjects(config_file=config, fmriprep_dir=fmriprep_dir, bids_dir=bids_dir, + output_dir=postproc_dir, log_dir=log_dir) + + def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" config = clpipe_fmriprep_dir / "clpipe_config.json" diff --git a/tests/postprocess2/test_func.py b/tests/postprocess2/test_func.py deleted file mode 100644 index a4fe0243..00000000 --- a/tests/postprocess2/test_func.py +++ /dev/null @@ -1,18 +0,0 @@ -import pytest -from pathlib import Path - -from clpipe.fmri_postprocess2 import * - -def test_postprocess_fmriprep_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - config = clpipe_fmriprep_dir / "clpipe_config.json" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - - with pytest.raises(SystemExit): - postprocess_subjects_controller(config_file=config, fmriprep_dir=fmriprep_dir, bids_dir=bids_dir, - output_dir=postproc_dir, log_dir=log_dir) \ No newline at end of file From 247476143708678a306c3824b295d2b314daf167 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:28:03 -0500 Subject: [PATCH 53/82] Correct reference to susan workflow builder. 
--- tests/postprocess2/test_wfs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/postprocess2/test_wfs.py b/tests/postprocess2/test_wfs.py index 008a4145..73eb03d3 100644 --- a/tests/postprocess2/test_wfs.py +++ b/tests/postprocess2/test_wfs.py @@ -22,7 +22,7 @@ def test_spatial_smoothing_wf_no_mask(artifact_dir, request, sample_raw_image, p test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "smoothed.nii.gz" - wf = build_spatial_smoothing_workflow(in_file=sample_raw_image, out_file=out_path, fwhm_mm=6, + wf = build_SUSAN_workflow(in_file=sample_raw_image, out_file=out_path, fwhm_mm=6, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -104,7 +104,7 @@ def test_confound_regression_fsl_glm_wf(artifact_dir, sample_raw_image, sample_p regressed_path = test_path / "sample_raw_regressed.nii" - wf = build_confound_regression_fsl_glm_wf(confound_file = sample_postprocessed_confounds, in_file=sample_raw_image, out_file=regressed_path, + wf = build_confound_regression_fsl_glm_workflow(confound_file = sample_postprocessed_confounds, in_file=sample_raw_image, out_file=regressed_path, mask_file=sample_raw_image_mask, base_dir=test_path, crashdump_dir=test_path) wf.run() From 4710f151e9e17106cb2785f2dfbc9bfa4aad6416 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:32:11 -0500 Subject: [PATCH 54/82] Correct parameter name. --- tests/postprocess2/test_wfs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/postprocess2/test_wfs.py b/tests/postprocess2/test_wfs.py index 73eb03d3..6e508e8d 100644 --- a/tests/postprocess2/test_wfs.py +++ b/tests/postprocess2/test_wfs.py @@ -119,7 +119,7 @@ def test_confound_regression_afni_3dTproject_wf(artifact_dir, sample_raw_image, regressed_path = test_path / "sample_raw_regressed.nii.gz" - wf = build_confound_regression_afni_3dTproject(confound_file = sample_postprocessed_confounds, in_file=sample_raw_image, out_file=regressed_path, + wf = build_confound_regression_afni_3dTproject(confounds_file=sample_postprocessed_confounds, in_file=sample_raw_image, out_file=regressed_path, mask_file=sample_raw_image_mask, base_dir=test_path, crashdump_dir=test_path) wf.run() From f3ea8f687e3e7786dd4febaf4b3633743cede068 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 13:54:55 -0500 Subject: [PATCH 55/82] Fix no mask wf test so that it doesn't include the ApplyMask step. Modify the image workflow builder to throw a ValueError if ApplyMask is requested but the mask input is None.
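A minimal sketch of a test that could exercise the new guard, assuming the step key is spelled "ApplyMask" in ProcessingSteps and reusing fixtures already defined in tests/conftest.py (the test itself is illustrative and not part of this patch):

    import pytest

    from clpipe.postprocutils.workflows import build_image_postprocessing_workflow

    def test_apply_mask_without_mask_raises(postprocessing_config, sample_raw_image,
                                            tmp_path):
        # Request ApplyMask but supply no mask_file (assumed to default to None)
        postprocessing_config["ProcessingSteps"] = ["ApplyMask"]

        # The builder should now fail fast at construction time, before wf.run()
        with pytest.raises(ValueError):
            build_image_postprocessing_workflow(
                postprocessing_config, sample_raw_image, tmp_path / "out.nii.gz",
                tr=2, base_dir=tmp_path, crashdump_dir=tmp_path)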
--- clpipe/postprocutils/workflows.py | 2 ++ tests/postprocess2/test_wf_chains.py | 35 +++++++++++++++------------- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/clpipe/postprocutils/workflows.py b/clpipe/postprocutils/workflows.py index 7b63f3ca..de5d0575 100644 --- a/clpipe/postprocutils/workflows.py +++ b/clpipe/postprocutils/workflows.py @@ -173,6 +173,8 @@ def build_image_postprocessing_workflow(postprocessing_config: dict, in_file: os postproc_wf.connect(input_node, "confounds_file", current_wf, "inputnode.confounds_file") elif step == STEP_APPLY_MASK: + if mask_file is None: + raise ValueError(f"{STEP_APPLY_MASK}: No mask file provided.") current_wf = build_apply_mask_workflow(mask_file=mask_file, base_dir=postproc_wf.base_dir, crashdump_dir=crashdump_dir) elif step == STEP_TRIM_TIMEPOINTS: diff --git a/tests/postprocess2/test_wf_chains.py b/tests/postprocess2/test_wf_chains.py index f3d3d8a8..51cb62e5 100644 --- a/tests/postprocess2/test_wf_chains.py +++ b/tests/postprocess2/test_wf_chains.py @@ -10,8 +10,8 @@ def test_postprocess2_wf_2_steps(artifact_dir, postprocessing_config, request, s test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, + confounds_file=sample_confounds_timeseries, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -33,8 +33,8 @@ def test_postprocess2_wf_3_steps(artifact_dir, postprocessing_config, request, s test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, + confounds_file=sample_confounds_timeseries, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -56,8 +56,8 @@ def test_postprocess2_wf_1_step(artifact_dir, postprocessing_config, request, sa test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, + confounds_file=sample_confounds_timeseries, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -80,8 +80,8 @@ def test_postprocess2_wf_confound_regression_last(artifact_dir, postprocessing_c test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, 
mask_file=sample_raw_image_mask, + confounds_file=sample_confounds_timeseries, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -103,8 +103,8 @@ def test_postprocess2_wf_confound_regression_first(artifact_dir, postprocessing_ test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, + confounds_file=sample_confounds_timeseries, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -126,7 +126,7 @@ def test_postprocess2_wf_aroma(artifact_dir, postprocessing_config, request, sam test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, base_dir=test_path, crashdump_dir=test_path) @@ -149,7 +149,7 @@ def test_postprocess2_wf_aroma_last(artifact_dir, postprocessing_config, request test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, base_dir=test_path, crashdump_dir=test_path) @@ -169,7 +169,10 @@ def test_postprocess2_wf_no_mask(artifact_dir, postprocessing_config, request, s test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, sample_raw_image, out_path, 2, + postprocessing_config["ProcessingSteps"] = ["TemporalFiltering", "SpatialSmoothing", "IntensityNormalization"] + + wf = build_image_postprocessing_workflow( + postprocessing_config, sample_raw_image, out_path, tr=2, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -192,8 +195,8 @@ def test_postprocess2_wf_fslmaths_temporal_filter(artifact_dir, postprocessing_c test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, - confound_file=sample_confounds_timeseries, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, + confounds_file=sample_confounds_timeseries, base_dir=test_path, crashdump_dir=test_path) wf.run() @@ -215,7 +218,7 @@ def test_postprocess2_wf_resample(artifact_dir, postprocessing_config, request, test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "postProcessed.nii.gz" - wf = 
build_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, out_file=out_path, tr=2, mask_file=sample_raw_image_mask, + wf = build_image_postprocessing_workflow(postprocessing_config, in_file=sample_raw_image, export_path=out_path, tr=2, mask_file=sample_raw_image_mask, base_dir=test_path, crashdump_dir=test_path) wf.run() From 23d23ba93828faddd794469e59bad86a95ad9206 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 14:02:40 -0500 Subject: [PATCH 56/82] Adjust references for postprocess_subject tests. --- tests/postprocess2/test_controllers.py | 29 ++++++-------------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/tests/postprocess2/test_controllers.py b/tests/postprocess2/test_controllers.py index 7a85c82c..ec6d2840 100644 --- a/tests/postprocess2/test_controllers.py +++ b/tests/postprocess2/test_controllers.py @@ -14,7 +14,6 @@ def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, re log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - with pytest.raises(SystemExit): postprocess_subjects(config_file=config, fmriprep_dir=fmriprep_dir, bids_dir=bids_dir, output_dir=postproc_dir, log_dir=log_dir) @@ -23,14 +22,14 @@ def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, re def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" config = clpipe_fmriprep_dir / "clpipe_config.json" - glm_config = clpipe_fmriprep_dir / "glm_config.json" test_dir = helpers.create_test_dir(artifact_dir, request.node.name) postproc_dir = Path(test_dir / "data_postprocessed") log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - postprocess_subjects(config_file=config, glm_config_file=glm_config, fmriprep_dir=fmriprep_dir, - output_dir=postproc_dir, log_dir=log_dir) + with pytest.raises(SystemExit): + postprocess_subjects(config_file=config, fmriprep_dir=fmriprep_dir, + output_dir=postproc_dir, log_dir=log_dir) def test_postprocess_subjects_dir_config_only(clpipe_fmriprep_dir): @@ -43,30 +42,14 @@ def test_postprocess_subjects_dir_config_only(clpipe_fmriprep_dir): def test_postprocess_subjects_dir_invalid_subject(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" config = clpipe_fmriprep_dir / "clpipe_config.json" - glm_config = clpipe_fmriprep_dir / "glm_config.json" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - postprocess_subjects(subjects=['99'], config_file=config, glm_config_file=glm_config, fmriprep_dir=fmriprep_dir, - output_dir=postproc_dir, log_dir=log_dir) - - -def test_postprocess_subjects_job(clpipe_fmriprep_dir, artifact_dir, helpers, request): - config = clpipe_fmriprep_dir / "clpipe_config.json" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" test_dir = helpers.create_test_dir(artifact_dir, request.node.name) postproc_dir = Path(test_dir / "data_postprocessed") - #pybids_db_path = Path(test_dir / "bids_index") - pybids_db_path = None - log_dir = Path(test_dir / "logs" / "postproc_logs") log_dir.mkdir(parents=True, exist_ok=True) - postprocess_subjects(bids_dir, fmriprep_dir, postproc_dir, config, - 
log_dir=log_dir, pybids_db_path=pybids_db_path) + with pytest.raises(SystemExit): + postprocess_subjects(subjects=['99'], config_file=config, fmriprep_dir=fmriprep_dir, + output_dir=postproc_dir, log_dir=log_dir) def test_prepare_confounds(sample_confounds_timeseries, postprocessing_config, artifact_dir, helpers, request): From 16895d4b1924f8a844c8240cc71e92d28fc3780a Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 14:03:54 -0500 Subject: [PATCH 57/82] Move confounds wf tests to wf_chains. --- tests/postprocess2/test_controllers.py | 29 -------------------------- tests/postprocess2/test_wf_chains.py | 29 ++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/tests/postprocess2/test_controllers.py b/tests/postprocess2/test_controllers.py index ec6d2840..2cdf8d49 100644 --- a/tests/postprocess2/test_controllers.py +++ b/tests/postprocess2/test_controllers.py @@ -52,35 +52,6 @@ def test_postprocess_subjects_dir_invalid_subject(clpipe_fmriprep_dir, artifact_ output_dir=postproc_dir, log_dir=log_dir) -def test_prepare_confounds(sample_confounds_timeseries, postprocessing_config, artifact_dir, helpers, request): - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "new_confounds.tsv" - - cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, - out_file=out_path, base_dir=test_path, crashdump_dir=test_path, tr=2) - - cf_workflow.run() - - assert True - - -def test_prepare_confounds_aroma(sample_confounds_timeseries, postprocessing_config, sample_melodic_mixing, sample_aroma_noise_ics, - artifact_dir, helpers, request): - - test_path = helpers.create_test_dir(artifact_dir, request.node.name) - out_path = test_path / "new_confounds.tsv" - - postprocessing_config["ProcessingSteps"] = ["AROMARegression", "TemporalFiltering", "IntensityNormalization"] - - cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, - out_file=out_path, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, - base_dir=test_path, crashdump_dir=test_path, tr=2) - - cf_workflow.run() - - assert True - - def test_postprocess_subject_job_setup(clpipe_fmriprep_dir, artifact_dir, helpers, request): fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" bids_dir = clpipe_fmriprep_dir / "data_BIDS" diff --git a/tests/postprocess2/test_wf_chains.py b/tests/postprocess2/test_wf_chains.py index 51cb62e5..78a8c58f 100644 --- a/tests/postprocess2/test_wf_chains.py +++ b/tests/postprocess2/test_wf_chains.py @@ -230,3 +230,32 @@ def test_postprocess2_wf_resample(artifact_dir, postprocessing_config, request, helpers.plot_4D_img_slice(out_path, "postProcessed.png") assert True + + +def test_prepare_confounds(sample_confounds_timeseries, postprocessing_config, artifact_dir, helpers, request): + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / "new_confounds.tsv" + + cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, + out_file=out_path, base_dir=test_path, crashdump_dir=test_path, tr=2) + + cf_workflow.run() + + assert True + + +def test_prepare_confounds_aroma(sample_confounds_timeseries, postprocessing_config, sample_melodic_mixing, sample_aroma_noise_ics, + artifact_dir, helpers, request): + + test_path = helpers.create_test_dir(artifact_dir, request.node.name) + out_path = test_path / 
"new_confounds.tsv" + + postprocessing_config["ProcessingSteps"] = ["AROMARegression", "TemporalFiltering", "IntensityNormalization"] + + cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, + out_file=out_path, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, + base_dir=test_path, crashdump_dir=test_path, tr=2) + + cf_workflow.run() + + assert True From b97d228a33daafe48db45072a1b51ae4d8997e0f Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 14:10:06 -0500 Subject: [PATCH 58/82] Remove now irrelevant tests. --- tests/postprocess2/test_controllers.py | 68 +------------------------- 1 file changed, 1 insertion(+), 67 deletions(-) diff --git a/tests/postprocess2/test_controllers.py b/tests/postprocess2/test_controllers.py index 2cdf8d49..6a6eb1d8 100644 --- a/tests/postprocess2/test_controllers.py +++ b/tests/postprocess2/test_controllers.py @@ -49,70 +49,4 @@ def test_postprocess_subjects_dir_invalid_subject(clpipe_fmriprep_dir, artifact_ with pytest.raises(SystemExit): postprocess_subjects(subjects=['99'], config_file=config, fmriprep_dir=fmriprep_dir, - output_dir=postproc_dir, log_dir=log_dir) - - -def test_postprocess_subject_job_setup(clpipe_fmriprep_dir, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - config = clpipe_fmriprep_dir / "clpipe_config.json" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - postproc_dir.mkdir(exist_ok=True) - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - pybids_db_path = test_dir / "BIDS_index" - - postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config, pybids_db_path=pybids_db_path, log_dir=log_dir) - - -def test_postprocess_subject_job(clpipe_fmriprep_dir, config_file, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - postproc_dir.mkdir(exist_ok=True) - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - pybids_db_path = test_dir / "BIDS_index" - - postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file, pybids_db_path=pybids_db_path, log_dir=log_dir) - - -def test_postprocess_subject_with_confounds(clpipe_fmriprep_dir, config_file_confounds, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - pybids_db_path = test_dir / "bids_index" - postproc_dir = Path(test_dir / "data_postprocessed") - postproc_dir.mkdir(exist_ok=True) - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) - - -def test_postprocess_subject_aroma(clpipe_fmriprep_dir, config_file_aroma, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - 
pybids_db_path = test_dir / "bids_index" - postproc_dir = Path(test_dir / "data_postprocessed") - postproc_dir.mkdir(exist_ok=True) - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma, pybids_db_path=pybids_db_path, log_dir=log_dir) - - -def test_postprocess_subject_aroma_with_confound_processing(clpipe_fmriprep_dir, config_file_aroma_confounds, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - bids_dir = clpipe_fmriprep_dir / "data_BIDS" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - pybids_db_path = test_dir / "bids_index" - postproc_dir = Path(test_dir / "data_postprocessed") - postproc_dir.mkdir(exist_ok=True) - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - postprocess_subject('1', bids_dir, fmriprep_dir, postproc_dir, config_file_aroma_confounds, pybids_db_path=pybids_db_path, log_dir=log_dir) + output_dir=postproc_dir, log_dir=log_dir) \ No newline at end of file From be8ea516d98407f64b0490cfbfd1058aa9062221 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 14:10:48 -0500 Subject: [PATCH 59/82] Rename controllers to functions in anticipation of the cli functions potentially operating as controllers. --- tests/postprocess2/{test_controllers.py => test_functions.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/postprocess2/{test_controllers.py => test_functions.py} (100%) diff --git a/tests/postprocess2/test_controllers.py b/tests/postprocess2/test_functions.py similarity index 100% rename from tests/postprocess2/test_controllers.py rename to tests/postprocess2/test_functions.py From b4e94db1ca7c4c6dfd492632809e9510a022ef6c Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Tue, 14 Feb 2023 14:13:58 -0500 Subject: [PATCH 60/82] Remove redundant test. 
--- tests/postprocess2/test_functions.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/tests/postprocess2/test_functions.py b/tests/postprocess2/test_functions.py index 6a6eb1d8..4bcc775a 100644 --- a/tests/postprocess2/test_functions.py +++ b/tests/postprocess2/test_functions.py @@ -19,19 +19,6 @@ def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, re output_dir=postproc_dir, log_dir=log_dir) -def test_postprocess_subjects_dir(clpipe_fmriprep_dir, artifact_dir, helpers, request): - fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep" - config = clpipe_fmriprep_dir / "clpipe_config.json" - test_dir = helpers.create_test_dir(artifact_dir, request.node.name) - postproc_dir = Path(test_dir / "data_postprocessed") - log_dir = Path(test_dir / "logs" / "postproc_logs") - log_dir.mkdir(parents=True, exist_ok=True) - - with pytest.raises(SystemExit): - postprocess_subjects(config_file=config, fmriprep_dir=fmriprep_dir, - output_dir=postproc_dir, log_dir=log_dir) - - def test_postprocess_subjects_dir_config_only(clpipe_fmriprep_dir): config = clpipe_fmriprep_dir / "clpipe_config.json" From 638559177873fb2c8114acd186df0a7e1f6d218b Mon Sep 17 00:00:00 2001 From: bhavithmanapoty Date: Wed, 15 Feb 2023 22:31:04 -0500 Subject: [PATCH 61/82] added tests --- tests/conftest.py | 6 ++++++ tests/test_setup.py | 25 +++++++++++++------------ 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 1415581a..7881fa67 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -92,6 +92,12 @@ def project_dir(tmp_path_factory): proj_path = tmp_path_factory.mktemp(PROJECT_TITLE) return Path(proj_path) +@pytest.fixture(scope="function") +def source_data(tmp_path_factory): + """Fixture which provides a temporary space for a source data folder.""" + source_path = tmp_path_factory.mktemp("source_data") + return Path(source_path) + @pytest.fixture(scope="module") def clpipe_dir(tmp_path_factory): diff --git a/tests/test_setup.py b/tests/test_setup.py index e3762683..c7a1c5b5 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -1,5 +1,6 @@ import pytest from pathlib import Path +import os from clpipe.project_setup import project_setup @@ -15,28 +16,28 @@ def test_setup_no_source(project_dir): assert Path(project_dir / "data_DICOMs").exists() -def test_setup_referenced_source(project_dir): +def test_setup_referenced_source(project_dir, source_data): """Check that clpipe's generated config file references a specified source directory that is not within the clpipe project directory. This variant should not create a data_DICOMs directory. - - project_setup(project_title="Test", project_dir="TestDir", - source_data="TestSource", move_source_data=False, - symlink_source_data=False, debug=False) """ - assert False + project_setup(project_title=PROJECT_TITLE, project_dir=project_dir, + source_data=source_data, move_source_data=False, + symlink_source_data=False, debug=False) + + assert not Path(project_dir / "data_DICOMs").exists() and Path(project_dir / source_data).exists() -def test_setup_symlink_source(project_dir): +def test_setup_symlink_source(project_dir, source_data): """Check that clpipe creates a data_DICOMs dir and symlinks it to the given source data. 
- - project_setup(project_title="Test", project_dir="TestDir", - source_data="TestSource", move_source_data=False, - symlink_source_data=True, debug=False) """ - assert False + project_setup(project_title=PROJECT_TITLE, project_dir=project_dir, + source_data=source_data, move_source_data=False, + symlink_source_data=True, debug=False) + + assert Path(project_dir / "data_DICOMs").exists() and os.path.islink(project_dir / "data_DICOMs") @pytest.mark.skip(reason="Feature Not implemented") def test_setup_move_source(project_dir): From 2db670b6d99cb5d753b05062ecfc9abd73ce9751 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Thu, 16 Feb 2023 12:54:41 -0500 Subject: [PATCH 62/82] Remove project dir from test path of source data. --- tests/test_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_setup.py b/tests/test_setup.py index c7a1c5b5..24e710d1 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -25,7 +25,7 @@ def test_setup_referenced_source(project_dir, source_data): source_data=source_data, move_source_data=False, symlink_source_data=False, debug=False) - assert not Path(project_dir / "data_DICOMs").exists() and Path(project_dir / source_data).exists() + assert not Path(project_dir / "data_DICOMs").exists() and Path(source_data).exists() From 0b851786836891bb2306df483ac8319d9b961bb6 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 20 Feb 2023 12:21:08 -0500 Subject: [PATCH 63/82] Add a test module for configuration. --- tests/test_config.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 tests/test_config.py diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..a411f0cd --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,43 @@ +import pytest + +from clpipe.config import * + + +@pytest.mark.skip(reason="Test Not Implemented") +def test_config_default(clpipe_config_default): + """ Ensure that the default config file (data/defaultConfig.json) + is successfully loaded into the configuration object. + """ + + assert False + # e.g. test a few fields + # assert config.project_title == "" + # assert config.convert2bids.bids_dir == "" + + +@pytest.mark.skip(reason="Test Not Implemented") +def test_config_extra_fields(config_file): + """ Ensure that the initial config for a test project is successfully loaded.""" + assert False + + +# Could consider creating test config files for these case-specific config tests +# that follow. These could go in tests/data/config_files +# Or we could just load in a base config file and modify the dictionary in code to +# get the test case we want. + + +@pytest.mark.skip(reason="Test Not Implemented") +def test_config_wrong_order(): + """ Ensure that a config file with fields in an unexpected order will successfully + load. + """ + assert False + + +@pytest.mark.skip(reason="Test Not Implemented") +def test_config_author_contributor(): + """ Check that the conversion of the Authors/Contributors to just 'Contributors' + works successfully. + """ + assert False \ No newline at end of file From da3f3a4b64dbe471bd5e369fa840d0ed681d0879 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 20 Feb 2023 12:31:18 -0500 Subject: [PATCH 64/82] Add a short script to load modules needed for development.
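Because module load only modifies the invoking shell's environment, the script is intended to be sourced rather than run as a subprocess; illustrative usage:

    source setup_modules.sh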
--- setup_modules.sh | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 setup_modules.sh diff --git a/setup_modules.sh b/setup_modules.sh new file mode 100644 index 00000000..51786d0c --- /dev/null +++ b/setup_modules.sh @@ -0,0 +1,13 @@ +# Setup modules needed for the development environment + +# Clear any loaded modules +module purge + +# Load all dependent modules +module load fsl/6.0.3 +module load freesurfer/6.0.0 +module load afni/20.3.00 +module load r/4.2.1 +module load flywheel/16.4.0 +module load dcm2niix/1.0.20211006 +module load flywheel/16.4.0 \ No newline at end of file From 313879a0039380c9dcffd2e974c5c0aa1349553a Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 20 Feb 2023 13:55:59 -0500 Subject: [PATCH 65/82] Add a simple test for bids_validate --- tests/test_bids_validate.py | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 tests/test_bids_validate.py diff --git a/tests/test_bids_validate.py b/tests/test_bids_validate.py new file mode 100644 index 00000000..d2b3a786 --- /dev/null +++ b/tests/test_bids_validate.py @@ -0,0 +1,8 @@ +import pytest +from clpipe.bids_validator import bids_validate + +def test_bids_validate(config_file): + """ Check basic attempt to run bids_validate.""" + + bids_validate(config_file=config_file) + assert True \ No newline at end of file From e4c7bfaf399753d8a302bcd22ab760da45be93ea Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 20 Feb 2023 14:05:05 -0500 Subject: [PATCH 66/82] Add some more configuration tests and add a fixture for getting the test project's configuration as a dictionary. --- tests/conftest.py | 6 ++++++ tests/test_config.py | 34 ++++++++++++++++++++++------------ 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7881fa67..f8b0b923 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -310,6 +310,12 @@ def sample_reference() -> Path: def config_file(clpipe_dir): return clpipe_dir / "clpipe_config.json" +@pytest.fixture(scope="module") +def clpipe_config(config_file): + with open(config_file, 'r') as f: + config_dict = json.load(f) + return config_dict + @pytest.fixture(scope="module") def config_file_confounds(clpipe_config_default, config_file): clpipe_config_default["PostProcessingOptions2"]["ConfoundOptions"]["Include"] = True diff --git a/tests/test_config.py b/tests/test_config.py index a411f0cd..76df901c 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,10 +2,26 @@ from clpipe.config import * +@pytest.mark.skip(reason="Test Not Implemented") +def test_json_load(config_file): + """ Ensure that the config class loads in .json data as expected. """ + + assert False + + +@pytest.mark.skip(reason="Feature Not Implemented") +def test_yaml_load(): + """ Ensure that the config class loads from .yaml as expected. """ + + assert False + + +# Using dictionaries over file references from this point on - no need to test +# json.load() @pytest.mark.skip(reason="Test Not Implemented") -def test_config_default(clpipe_config_default): - """ Ensure that the default config file (data/defaultConfig.json) +def test_default(clpipe_config_default): + """ Ensure that data from the default config file (data/defaultConfig.json) is successfully loaded into the configuration object.
""" @@ -16,27 +32,21 @@ def test_config_default(clpipe_config_default): @pytest.mark.skip(reason="Test Not Implemented") -def test_config_extra_fields(config_file): +def test_extra_fields(clpipe_config): """ Ensure that the intial config for a test project is successfully loaded.""" assert False -# Could consider creating test config files for these case-specific config tests -# that follow. These could go in tests/data/config_files -# Or we could just load in a base config file and modify the dictionary in code to -# get the test case we want. - - @pytest.mark.skip(reason="Test Not Implemented") -def test_config_wrong_order(): - """ Ensure that a config file with fields in an unexpected order will successfully +def test_wrong_order(clpipe_config): + """ Ensure that a configuration with fields in an unexpected order will successfully load. """ assert False @pytest.mark.skip(reason="Test Not Implemented") -def test_config_author_contributor(): +def test_author_contributor(clpipe_config): """ Check that the conversion of the Authors/Contributors to just 'Contributors' works successfully. """ From 7d95966356c1f86740ed453d733b227ef18ba07d Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Mon, 20 Feb 2023 14:08:50 -0500 Subject: [PATCH 67/82] Add simple test for fmri_preprocess. --- tests/test_fmri_preprocess.py | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 tests/test_fmri_preprocess.py diff --git a/tests/test_fmri_preprocess.py b/tests/test_fmri_preprocess.py new file mode 100644 index 00000000..5b933da2 --- /dev/null +++ b/tests/test_fmri_preprocess.py @@ -0,0 +1,8 @@ +import pytest +from clpipe.fmri_preprocess import fmriprep_process + +def test_fmriprep_process(config_file): + """ Check basic attempt to run fmriprep_process.""" + + fmriprep_process(config_file=config_file) + assert True \ No newline at end of file From f6d0ce216395821eeb5154b94385f921518cf1ff Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 12:41:55 -0500 Subject: [PATCH 68/82] Add simple test for glm setup and add a fixture for it that gives back a project-based glm_config_file. --- tests/conftest.py | 13 +++++++++++++ tests/test_glm.py | 8 ++++++++ 2 files changed, 21 insertions(+) create mode 100644 tests/test_glm.py diff --git a/tests/conftest.py b/tests/conftest.py index f8b0b923..6b0e299e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -310,6 +310,19 @@ def sample_reference() -> Path: def config_file(clpipe_dir): return clpipe_dir / "clpipe_config.json" +@pytest.fixture(scope="module") +def glm_config_file(clpipe_fmriprep_dir: Path) -> Path: + """Provides a reference to a glm_config.json file that + has been setup in the context of a mock project. + + Args: + clpipe_fmriprep_dir (Path): Path to a mock fmriprep clpipe project + + Returns: + Path: Reference to mock glm_config.json file. 
+ """ + return clpipe_fmriprep_dir / "glm_config.json" + @pytest.fixture(scope="module") def clpipe_config(config_file): with open(config_file, 'r') as f: diff --git a/tests/test_glm.py b/tests/test_glm.py new file mode 100644 index 00000000..8996aafb --- /dev/null +++ b/tests/test_glm.py @@ -0,0 +1,8 @@ +import pytest +from clpipe.glm_setup import glm_setup + +def test_glm_setup(config_file: Path, glm_config_file: Path) -> None: + """Check basic attempt to run fmriprep_process.""" + + glm_setup(config_file=config_file, glm_config_file=glm_config_file) + assert True \ No newline at end of file From 380267c14a6a293c771b31d7657aa26a31331ce1 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 13:06:01 -0500 Subject: [PATCH 69/82] Rename glm_setup to match module. --- tests/{test_glm.py => test_glm_setup.py} | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) rename tests/{test_glm.py => test_glm_setup.py} (66%) diff --git a/tests/test_glm.py b/tests/test_glm_setup.py similarity index 66% rename from tests/test_glm.py rename to tests/test_glm_setup.py index 8996aafb..d23c9b4f 100644 --- a/tests/test_glm.py +++ b/tests/test_glm_setup.py @@ -1,7 +1,8 @@ import pytest +from pathlib import Path from clpipe.glm_setup import glm_setup -def test_glm_setup(config_file: Path, glm_config_file: Path) -> None: +def test_glm_setup_basic(config_file: Path, glm_config_file: Path) -> None: """Check basic attempt to run fmriprep_process.""" glm_setup(config_file=config_file, glm_config_file=glm_config_file) From 11f9bb838346e141a284348dab4528a6e0ca96a6 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 13:06:22 -0500 Subject: [PATCH 70/82] Add a basic guide for tests. --- tests/README.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 tests/README.md diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..e572144b --- /dev/null +++ b/tests/README.md @@ -0,0 +1,28 @@ +# Tests + +## Basic Overview + +clpipe uses pytest as its testing framework. + +All test modules belong in the `tests` folder. Each test module should use the following default pytest naming pattern: +`test_`, where `` is the name of the module being tested. + +All test functions should use the following default pytest naming pattern: +`test__`, where `` is function being tested and +`` is the unique case being tested. + +clpipe leverages pytest's [fixture](https://docs.pytest.org/en/6.2.x/fixture.html) +feature extensively to provide its test cases with +preconfigured objects for testing. **Any arguments passed into pytests tests are +fixtures**. These fixtures are defined in `tests/conftest.py`. + +clpipe's tests often leverage temporary file storage. To control where this +this location is by default, you can use the pytest flag `--basetemp`. +As clpipe's `.gitignore` includes `TestFiles` by default, this makes it a good +place to store your temporary files for easy viewing. This could be +set like this: `--basetemp TestFiles/tmp`, where `tmp` is a sub-folder manually +created to house temporary test files. + +Many clpipe tests are not fully transient, and instead save their outputs to allow +for inspection. These output files are saved to `tests/artifacts`, +which is ignored by git by default. \ No newline at end of file From 858dbac1b7bde0aa18dce8b60927deaf0d0d7f0b Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 13:13:09 -0500 Subject: [PATCH 71/82] Rename test modules to match their associated modules. 
--- tests/{test_dcm2bids.py => test_bids_conversion.py} | 0 tests/{test_bids_validate.py => test_bids_validator.py} | 0 tests/{test_setup.py => test_project_setup.py} | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename tests/{test_dcm2bids.py => test_bids_conversion.py} (100%) rename tests/{test_bids_validate.py => test_bids_validator.py} (100%) rename tests/{test_setup.py => test_project_setup.py} (100%) diff --git a/tests/test_dcm2bids.py b/tests/test_bids_conversion.py similarity index 100% rename from tests/test_dcm2bids.py rename to tests/test_bids_conversion.py diff --git a/tests/test_bids_validate.py b/tests/test_bids_validator.py similarity index 100% rename from tests/test_bids_validate.py rename to tests/test_bids_validator.py diff --git a/tests/test_setup.py b/tests/test_project_setup.py similarity index 100% rename from tests/test_setup.py rename to tests/test_project_setup.py From a5679106b916eeb4ea63120a3403b3beab37356c Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 13:25:29 -0500 Subject: [PATCH 72/82] Add typing to test signatures. --- tests/conftest.py | 4 ++-- tests/test_bids_conversion.py | 13 +++++++------ tests/test_bids_validator.py | 3 ++- tests/test_config.py | 18 +++++++++++------- tests/test_fmri_preprocess.py | 3 ++- tests/test_glm_setup.py | 2 +- tests/test_project_setup.py | 15 ++++++++------- 7 files changed, 33 insertions(+), 25 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 6b0e299e..a13debda 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -205,7 +205,7 @@ def clpipe_fmriprep_dir(clpipe_bids_dir, sample_raw_image, sample_raw_image_mask return clpipe_bids_dir @pytest.fixture(scope="module") -def clpipe_config_default(): +def clpipe_config_default() -> dict: return ClpipeConfigParser().config @pytest.fixture(scope="module") @@ -324,7 +324,7 @@ def glm_config_file(clpipe_fmriprep_dir: Path) -> Path: return clpipe_fmriprep_dir / "glm_config.json" @pytest.fixture(scope="module") -def clpipe_config(config_file): +def clpipe_config(config_file) -> dict: with open(config_file, 'r') as f: config_dict = json.load(f) return config_dict diff --git a/tests/test_bids_conversion.py b/tests/test_bids_conversion.py index 093069e7..c349a694 100644 --- a/tests/test_bids_conversion.py +++ b/tests/test_bids_conversion.py @@ -1,8 +1,9 @@ import pytest +from pathlib import Path from clpipe.bids_conversion import convert2bids -def test_dcm2bids(clpipe_dicom_dir, config_file): +def test_dcm2bids(clpipe_dicom_dir: Path, config_file: Path): convert2bids( dcm2bids = True, config_file = config_file, @@ -12,7 +13,7 @@ def test_dcm2bids(clpipe_dicom_dir, config_file): assert True -def test_dcm2bids_sub_session(clpipe_dicom_dir, config_file): +def test_dcm2bids_sub_session(clpipe_dicom_dir: Path, config_file: Path): convert2bids( dcm2bids = True, config_file = config_file, @@ -22,7 +23,7 @@ def test_dcm2bids_sub_session(clpipe_dicom_dir, config_file): assert True -def test_dcm2bids_sub_session_flat(clpipe_dicom_dir, config_file): +def test_dcm2bids_sub_session_flat(clpipe_dicom_dir: Path, config_file: Path): convert2bids( dcm2bids = True, config_file = config_file, @@ -32,7 +33,7 @@ def test_dcm2bids_sub_session_flat(clpipe_dicom_dir, config_file): assert True -def test_dcm2bids_session_sub(clpipe_dicom_dir, config_file): +def test_dcm2bids_session_sub(clpipe_dicom_dir: Path, config_file: Path): convert2bids( dcm2bids = True, config_file = config_file, @@ -42,7 +43,7 @@ def test_dcm2bids_session_sub(clpipe_dicom_dir, 
config_file):
     assert True
 
 
-def test_dcm2bids_session_sub_flat(clpipe_dicom_dir, config_file):
+def test_dcm2bids_session_sub_flat(clpipe_dicom_dir: Path, config_file: Path):
     convert2bids(
         dcm2bids = True,
         config_file = config_file,
@@ -52,7 +53,7 @@ def test_dcm2bids_session_sub_flat(clpipe_dicom_dir, config_file):
     assert True
 
 
-def test_heudiconv(clpipe_dicom_dir, config_file):
+def test_heudiconv(clpipe_dicom_dir: Path, config_file: Path):
     convert2bids(
         dcm2bids = False,
         config_file = config_file,
diff --git a/tests/test_bids_validator.py b/tests/test_bids_validator.py
index d2b3a786..26c31171 100644
--- a/tests/test_bids_validator.py
+++ b/tests/test_bids_validator.py
@@ -1,7 +1,8 @@
 import pytest
+from pathlib import Path
 from clpipe.bids_validator import bids_validate
 
-def test_bids_validate(config_file):
+def test_bids_validate(config_file: Path):
     """ Check basic attempt to run bids_validate."""
 
     bids_validate(config_file=config_file)
diff --git a/tests/test_config.py b/tests/test_config.py
index 76df901c..2edec747 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,17 +1,21 @@
 import pytest
+from pathlib import Path
 
 from clpipe.config import *
 
 
 @pytest.mark.skip(reason="Test Not Implemented")
-def test_json_load(config_file):
+def test_json_load(config_file: Path):
     """ Ensure that the config class loads in .json data as expected. """
 
     assert False
 
 
 @pytest.mark.skip(reason="Feature Not Implemented")
-def test_yaml_load():
-    """ Ensure that the config class loads from .yaml as expected. """
+def test_yaml_load(yaml_config_file: Path):
+    """ Ensure that the config class loads from .yaml as expected.
+
+    yaml_config_file fixture does not yet exist
+    """
 
     assert False
 
@@ -20,7 +24,7 @@ def test_yaml_load():
 # Using dictionaries over file references from this point on - no need to test
 # json.load()
 
 @pytest.mark.skip(reason="Test Not Implemented")
-def test_default(clpipe_config_default):
+def test_default(clpipe_config_default: dict):
     """ Ensure that data from the default config file (data/defaultConfig.json)
     is successfully loaded into the configuration object.
     """
@@ -36,13 +40,13 @@ def test_default(clpipe_config_default):
 
 
 @pytest.mark.skip(reason="Test Not Implemented")
-def test_extra_fields(clpipe_config):
+def test_extra_fields(clpipe_config: dict):
     """ Ensure that the initial config for a test project is successfully loaded."""
     assert False
 
 
 @pytest.mark.skip(reason="Test Not Implemented")
-def test_wrong_order(clpipe_config):
+def test_wrong_order(clpipe_config: dict):
     """ Ensure that a configuration with fields in an unexpected order will successfully
     load.
     """
     assert False
 
 
 @pytest.mark.skip(reason="Test Not Implemented")
-def test_author_contributor(clpipe_config):
+def test_author_contributor(clpipe_config: dict):
     """ Check that the conversion of the Authors/Contributors to just 'Contributors'
     works successfully.
""" diff --git a/tests/test_fmri_preprocess.py b/tests/test_fmri_preprocess.py index 5b933da2..8ea2132f 100644 --- a/tests/test_fmri_preprocess.py +++ b/tests/test_fmri_preprocess.py @@ -1,7 +1,8 @@ import pytest +from pathlib import Path from clpipe.fmri_preprocess import fmriprep_process -def test_fmriprep_process(config_file): +def test_fmriprep_process(config_file: Path): """ Check basic attempt to run fmriprep_process.""" fmriprep_process(config_file=config_file) diff --git a/tests/test_glm_setup.py b/tests/test_glm_setup.py index d23c9b4f..8bf8e1e1 100644 --- a/tests/test_glm_setup.py +++ b/tests/test_glm_setup.py @@ -2,7 +2,7 @@ from pathlib import Path from clpipe.glm_setup import glm_setup -def test_glm_setup_basic(config_file: Path, glm_config_file: Path) -> None: +def test_glm_setup_basic(config_file: Path, glm_config_file: Path): """Check basic attempt to run fmriprep_process.""" glm_setup(config_file=config_file, glm_config_file=glm_config_file) diff --git a/tests/test_project_setup.py b/tests/test_project_setup.py index 24e710d1..dc938e4d 100644 --- a/tests/test_project_setup.py +++ b/tests/test_project_setup.py @@ -1,12 +1,13 @@ import pytest from pathlib import Path import os +from typing import List from clpipe.project_setup import project_setup PROJECT_TITLE = "test_project" -def test_setup_no_source(project_dir): +def test_setup_no_source(project_dir: Path): """Check that clpipe creates an empty data_DICOMs folder in the project directory when no source is provided. """ @@ -16,7 +17,7 @@ def test_setup_no_source(project_dir): assert Path(project_dir / "data_DICOMs").exists() -def test_setup_referenced_source(project_dir, source_data): +def test_setup_referenced_source(project_dir: Path, source_data: Path): """Check that clpipe's generated config file references a specified source directory that is not within the clpipe project directory. This variant should not create a data_DICOMs directory. @@ -28,7 +29,7 @@ def test_setup_referenced_source(project_dir, source_data): assert not Path(project_dir / "data_DICOMs").exists() and Path(source_data).exists() -def test_setup_symlink_source(project_dir, source_data): +def test_setup_symlink_source(project_dir: Path, source_data: Path): """Check that clpipe creates a data_DICOMs dir and symlinks it to the given source data. """ @@ -40,7 +41,7 @@ def test_setup_symlink_source(project_dir, source_data): assert Path(project_dir / "data_DICOMs").exists() and os.path.islink(project_dir / "data_DICOMs") @pytest.mark.skip(reason="Feature Not implemented") -def test_setup_move_source(project_dir): +def test_setup_move_source(project_dir: Path): """Note: this is currently NOT IMPLEMENTED in project setup. 
Check that clpipe creates a data_DICOMs dir and moves the data from a given
    source directory into data_DICOMs.
    """
     pass
 
 
-def test_setup_missing(clpipe_dir, project_paths):
+def test_setup_missing(clpipe_dir: Path, project_paths: List[Path]):
     """Check if clpipe setup fails to create any expected folders or files."""
 
     missing = project_paths
@@ -65,7 +66,7 @@ def test_setup_missing(clpipe_dir, project_paths):
     assert len(missing) == 0, f"Missing expected paths: {missing}"
 
 
-def test_setup_extra(clpipe_dir, project_paths):
+def test_setup_extra(clpipe_dir: Path, project_paths: List[Path]):
     """Check to see if clpipe setup creates any extra, unexpected folders or files."""
 
     extra = []
@@ -78,7 +79,7 @@ def test_setup_extra(clpipe_dir, project_paths):
 
 
 @pytest.fixture()
-def project_paths():
+def project_paths() -> List[Path]:
     # TODO: We should eventually just pull these constants from central config
 
     data_BIDS = Path("data_BIDS")

From a98fbf10892c763b52b16b0fac872bd87e7218c4 Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 13:40:45 -0500
Subject: [PATCH 73/82] Add a change log to documentation.

---
 docs/changelog.md   | 65 +++++++++++++++++++++++++++++++++++++++++++++
 docs/dicom_sync.rst |  2 +-
 docs/index.rst      |  1 +
 3 files changed, 67 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog.md

diff --git a/docs/changelog.md b/docs/changelog.md
new file mode 100644
index 00000000..8e5bc8ce
--- /dev/null
+++ b/docs/changelog.md
@@ -0,0 +1,65 @@
+# Change Log - NEW
+
+## 1.7.3 (Feb 22, 2023)
+
+## 1.7.2 (Jan 31, 2023)
+
+### Flywheel Sync
+
+clpipe can now be used to sync DICOM data from Flywheel. Using Flywheel through clpipe allows the user to store their project's remote Flywheel path and download destination in their clpipe configuration file, and to automatically submit Flywheel sync commands to a cluster as batch jobs. clpipe also cleans up an oddly-named, empty temporary directory that is left behind when running Flywheel's sync command.
+
+This feature is accessed with the command `clpipe dicom flywheel_sync`. The clpipe subcommand `clpipe dicom` was added to give this command a "home" and to house future dicom-specific commands.
+
+```
+clpipe dicom flywheel_sync -c /path/to/your/clpipe_config.json -submit
+```
+
+### GLM
+- combined `l1_prepare_fsf` and `l2_prepare_fsf` into the `clpipe glm prepare` command to match how the launch command works:
+```
+> clpipe glm prepare
+Usage: clpipe glm prepare [OPTIONS] LEVEL MODEL
+
+  Propagate an .fsf file template for L1 or L2 GLM analysis.
+
+  LEVEL is the level of analysis, L1 or L2
+
+  MODEL must be a corresponding L1 or L2 model from your GLM configuration
+  file.
+
+Options:
+  -glm_config_file FILE  The path to your clpipe configuration file.
+                         [required]
+  -debug                 Flag to enable detailed error messages and traceback.
+  -help, -h, --help      Show this message and exit.
+```
+- `l1_prepare_fsf` no longer leaves an underscore at the end of the .fsf file names or the feat output directories
+- L1 and L2 config files now support LogDirectory options for specifying where to send the output of the launch command. The output folder is used by default if no LogDirectory is specified.
+- Improved error handling for `clpipe glm prepare` command
+
+### clpipe bids convert
+- Command has been moved from `clpipe bids` to the new `clpipe dicom`, which better reflects the BIDS conversion operation (a command acting on DICOM data to convert it to BIDS format).
It has also been renamed from `convert` back to its original name, `convert2bids`, to improve the clarity of this command's name.
+
+Usage now looks like this:
+
+`clpipe dicom convert2bids -c path/to/my/clpipe_config.json 256 -submit`
+
+### postproc2
+
+- now supports either fmriprep v20 or v21's directory structure
+- no longer deletes "processing_graph.dot" (the source of "processing_graph.png") to avoid a race condition that sometimes caused jobs to fail
+
+### get_reports
+
+Command `get_reports`, used for combining fMRIPrep QC report outputs into a ZIP archive:
+
+- has been added to the main clpipe menu as `clpipe reports fmriprep`
+- now prints more log messages
+- creates an archive with a shorter directory path
+- archive is now named "fMRIPrep_Archive" by default
+
+### Other Updates
+
+- Default version of fMRIPrep referenced in config updated to v21.0.2
+- Shorthand commands (e.g. -c for config_file, -s for submit) have been made consistent across all commands under the `clpipe` command
+- `clpipe postproc` no longer fails silently when no subjects are found - error is now raised
\ No newline at end of file
diff --git a/docs/dicom_sync.rst b/docs/dicom_sync.rst
index f842881b..a0229383 100644
--- a/docs/dicom_sync.rst
+++ b/docs/dicom_sync.rst
@@ -1,5 +1,5 @@
 ===========================
-Flywheel Sync - NEW
+Flywheel Sync
 ===========================
 
 clpipe now provides an avenue for syncing DICOM data with a remote source through
diff --git a/docs/index.rst b/docs/index.rst
index 911f08bc..e79aea2c 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -26,6 +26,7 @@ clpipe was developed to streamline the processing of MRI data using the high per
    roi_extraction
    cli
    dicom_sync
+   changelog
 
 .. toctree::
    :maxdepth: 1

From e76c9e8f20cf8f15de36cb653a65c80818bf9240 Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 14:11:04 -0500
Subject: [PATCH 74/82] Move the setup of GLM log directory before the config
 file dump, so that GLM log directory is filled out properly in the user's
 newly set up GLM config file.

---
 clpipe/config_json_parser.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/clpipe/config_json_parser.py b/clpipe/config_json_parser.py
index eeceaaba..2c75c05d 100644
--- a/clpipe/config_json_parser.py
+++ b/clpipe/config_json_parser.py
@@ -126,11 +126,13 @@ def setup_glm(self, project_path):
             os.mkdir(os.path.join(project_path, "l2_fsfs"))
             os.mkdir(os.path.join(project_path, "l2_gfeat_folders"))
 
-        glm_config.config_json_dump(project_path, "glm_config.json")
-        shutil.copyfile(resource_filename('clpipe', 'data/l2_sublist.csv'), os.path.join(project_path, "l2_sublist.csv"))
         glm_config.config['GLMSetupOptions']['LogDirectory'] = os.path.join(project_path, "logs", "glm_setup_logs")
         os.mkdir(os.path.join(project_path, "logs", "glm_setup_logs"))
 
+        glm_config.config_json_dump(project_path, "glm_config.json")
+        shutil.copyfile(resource_filename('clpipe', 'data/l2_sublist.csv'), os.path.join(project_path, "l2_sublist.csv"))
+
+
     def setup_fmriprep_directories(self, bidsDir, workingDir, outputDir, log_dir = None):
         if bidsDir is not None:
             self.config['FMRIPrepOptions']['BIDSDirectory'] = os.path.abspath(bidsDir)

From d0b21ebd2c323337a3ab088ae845b74eae4754cc Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 14:11:43 -0500
Subject: [PATCH 75/82] Create a new temp dir for transient temp files within
 the tests folder, so that it's easier to find than in TestDir.
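
With this change, transient pytest output can be kept inside the
repository by running, for example, `pytest --basetemp tests/temp`;
the new folder is ignored by git.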
---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index 5c469bf1..9da229b3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -61,6 +61,7 @@ crashlytics-build.properties
 staticfiles
 .env
 TestFiles/
+tests/temp
 tests/artifacts
 
 deploy.sh

From 874263c5b2427ab32c4a0f075a52900502f96b9e Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 14:12:00 -0500
Subject: [PATCH 76/82] Update tests README file to reflect changed temp dir
 location.

---
 tests/README.md | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/tests/README.md b/tests/README.md
index e572144b..566ff306 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -18,10 +18,9 @@ fixtures**. These fixtures are defined in `tests/conftest.py`.
 
 clpipe's tests often leverage temporary file storage. To control where this
 location is, you can use the pytest flag `--basetemp`.
-As clpipe's `.gitignore` includes `TestFiles` by default, this makes it a good
-place to store your temporary files for easy viewing. This could be
-set like this: `--basetemp TestFiles/tmp`, where `tmp` is a sub-folder manually
-created to house temporary test files.
+clpipe's `.gitignore` includes `tests/temp` by default for this purpose. You can point
+pytest to this folder like this: `--basetemp tests/temp`, where `temp` is a
+sub-folder manually created to house temporary test files.

From 189c965aaf11c3b411611c1005c4de83c92cb1ac Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 14:38:40 -0500
Subject: [PATCH 77/82] Update changelog to include 1.7.3 changes, also use
 new Enhancements/Bug Fixes format.

---
 docs/changelog.md | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/docs/changelog.md b/docs/changelog.md
index 8e5bc8ce..71fd0d43 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -2,6 +2,27 @@
 
 ## 1.7.3 (Feb 22, 2023)
 
+### Enhancements
+
+- `setup`: the "SourceOptions" block for `dicom flywheel_sync` is now included in the default configuration file
+- `setup`: more modality examples added to conversion_config.json starter file (T1w and fmap)
+- `setup`: conversion_config.json starter file now includes a dcm2niix customization line which sets its search depth to its maximum value, allowing dcm2bids to work with flywheel's DICOM sync directory
+- `setup`: the default `.bidsignore` file now includes `scans.json`, a file generated by heudiconv which would cause validation to fail otherwise
+- `bids validate`: in `clpipe_config.json`, moved `BIDSValidatorImage` from the `PostprocessingOptions` block to the `BIDSValidationOptions` block.
The command will still look for the image in its old location if it can't find it in BIDSValidationOptions, maintaining backwards compatibility with < 1.7.3 config files
+- `glm prepare`: improved logging messages
+- `glm prepare`: changed how file copying works so that only file contents are copied, not their permissions, making the command easier to run in a shared environment
+- `clpipe`: `-config_file/-c` is now a required argument in most commands, to prevent unhelpful error output when no config file is given and to steer clpipe more towards being configuration-file driven
+- `clpipe`: writing to clpipe.log is now skipped if the user does not have permission to write to it
+- `clpipe`: clpipe.log file now includes usernames
+- `clpipe`: clpipe.log file is written with group write permission by default
+
+### Bug Fixes
+
+- `setup`: the generated glm_config.json file's `[GLMSetupOptions][LogDirectory]` field is now automatically filled out. Previously it was left blank, despite the appropriate log folder being automatically created, and would throw an error in the batch manager if not filled out manually.
+- `reports fmriprep`: fixed an issue where the main .html report files were not being bundled into the output zip
+- `glm prepare`: fixed an issue where FileNotFoundErrors were not caught correctly, causing the program to exit earlier than intended
+
 
 ## 1.7.2 (Jan 31, 2023)
 
 ### Flywheel Sync

From 30cdf6b0d2cab3c0d74fca21f85af98ab2525a5a Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 14:44:09 -0500
Subject: [PATCH 78/82] Update default fmriprep reference to v22.

---
 clpipe/data/defaultConfig.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/clpipe/data/defaultConfig.json b/clpipe/data/defaultConfig.json
index dfb3063f..87442ee1 100644
--- a/clpipe/data/defaultConfig.json
+++ b/clpipe/data/defaultConfig.json
@@ -31,7 +31,7 @@
 		"BIDSDirectory": "",
 		"WorkingDirectory": "",
 		"OutputDirectory": "",
-		"FMRIPrepPath": "/proj/hng/singularity_imgs/fmriprep_21.0.2.sif",
+		"FMRIPrepPath": "/proj/hng/singularity_imgs/fmriprep_22.1.1.sif",
 		"FreesurferLicensePath": "/proj/hng/singularity_imgs/license.txt",
 		"UseAROMA": false,
 		"CommandLineOpts": "",

From 5fcb9017efe3c7bb065e02cd38f2cc8c7fcefd1e Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 15:37:41 -0500
Subject: [PATCH 79/82] Fix referenced paths for test_prepare_confounds and
 confounds_aroma.
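
The tests were still passing keyword arguments under their old names.
For reference, a minimal sketch of the corrected call, assuming the
updated signature of build_confounds_processing_workflow (the file
paths here are hypothetical; the real tests use fixture-provided files):

```python
# Corrected keyword names (old names noted in comments); paths are hypothetical.
cf_workflow = build_confounds_processing_workflow(
    postprocessing_config,
    confounds_file="sub-0_task-rest_desc-confounds_timeseries.tsv",  # was: confound_file
    export_file="new_confounds.tsv",                                 # was: out_file
    base_dir=test_path,
    crashdump_dir=test_path,
    tr=2,
)
cf_workflow.run()
```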
--- tests/postprocess2/test_wf_chains.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/postprocess2/test_wf_chains.py b/tests/postprocess2/test_wf_chains.py index 78a8c58f..ca41e592 100644 --- a/tests/postprocess2/test_wf_chains.py +++ b/tests/postprocess2/test_wf_chains.py @@ -236,8 +236,8 @@ def test_prepare_confounds(sample_confounds_timeseries, postprocessing_config, a test_path = helpers.create_test_dir(artifact_dir, request.node.name) out_path = test_path / "new_confounds.tsv" - cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, - out_file=out_path, base_dir=test_path, crashdump_dir=test_path, tr=2) + cf_workflow = build_confounds_processing_workflow(postprocessing_config, confounds_file=sample_confounds_timeseries, + export_file=out_path, base_dir=test_path, crashdump_dir=test_path, tr=2) cf_workflow.run() @@ -252,8 +252,8 @@ def test_prepare_confounds_aroma(sample_confounds_timeseries, postprocessing_con postprocessing_config["ProcessingSteps"] = ["AROMARegression", "TemporalFiltering", "IntensityNormalization"] - cf_workflow = build_confounds_processing_workflow(postprocessing_config, confound_file=sample_confounds_timeseries, - out_file=out_path, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, + cf_workflow = build_confounds_processing_workflow(postprocessing_config, confounds_file=sample_confounds_timeseries, + export_file=out_path, mixing_file=sample_melodic_mixing, noise_file=sample_aroma_noise_ics, base_dir=test_path, crashdump_dir=test_path, tr=2) cf_workflow.run() From 9c201761faba3fe5bd16a3dc53041a16c6eeb624 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 15:38:53 -0500 Subject: [PATCH 80/82] Fix task names in fmriprep fixture. --- tests/conftest.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index a13debda..d90baa2b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -160,10 +160,13 @@ def clpipe_bids_dir(clpipe_dir): #TODO: seperate AROMA into its own type of fmriprep dir @pytest.fixture(scope="module") def clpipe_fmriprep_dir(clpipe_bids_dir, sample_raw_image, sample_raw_image_mask, - sample_confounds_timeseries, sample_melodic_mixing, sample_aroma_noise_ics, sample_fmriprep_dataset_description): - """Fixture which adds fmriprep subject folders and mock fmriprep output data to data_fmriprep directory.""" + sample_confounds_timeseries, sample_melodic_mixing, sample_aroma_noise_ics, + sample_fmriprep_dataset_description) -> Path: + """ Fixture which adds fmriprep subject folders and mock + fmriprep output data to data_fmriprep directory. + """ - tasks = ["rest", "task1", "task2_run-1", "task2_run-2"] + tasks = ["rest", "1", "2_run-1", "2_run-2"] image_space = "space-MNI152NLin2009cAsym" bold_suffix = "desc-preproc_bold.nii.gz" From 731a114b2a205b48821e92d1e7f69f2908f4e181 Mon Sep 17 00:00:00 2001 From: Will Asciutto Date: Wed, 22 Feb 2023 15:39:14 -0500 Subject: [PATCH 81/82] Add tests for building image/confounds export path. 
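
A sketch of the mapping these tests assert, assuming the mock project
layout provided by the clpipe_fmriprep_dir fixture (paths abbreviated):

```python
# Images swap desc-preproc for desc-postproc and land under the subject's
# output func folder; confounds files keep their original filename.
#
#   <fmriprep_dir>/sub-0/func/sub-0_task-rest_..._desc-preproc_bold.nii.gz
#     -> <subject_out_dir>/func/sub-0_task-rest_..._desc-postproc_bold.nii.gz
#   <fmriprep_dir>/sub-0/func/sub-0_task-rest_desc-confounds_timeseries.tsv
#     -> <subject_out_dir>/func/sub-0_task-rest_desc-confounds_timeseries.tsv
export_path = build_export_path(image_path, "0", fmriprep_dir, subject_out_dir)
```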
---
 tests/postprocess2/test_functions.py | 37 +++++++++++++++++++++++++++-
 1 file changed, 36 insertions(+), 1 deletion(-)

diff --git a/tests/postprocess2/test_functions.py b/tests/postprocess2/test_functions.py
index 4bcc775a..5faff7f8 100644
--- a/tests/postprocess2/test_functions.py
+++ b/tests/postprocess2/test_functions.py
@@ -36,4 +36,39 @@ def test_postprocess_subjects_dir_invalid_subject(clpipe_fmriprep_dir, artifact_
 
     with pytest.raises(SystemExit):
         postprocess_subjects(subjects=['99'], config_file=config, fmriprep_dir=fmriprep_dir,
-            output_dir=postproc_dir, log_dir=log_dir)
\ No newline at end of file
+            output_dir=postproc_dir, log_dir=log_dir)
+
+
+def test_build_export_path_image(clpipe_fmriprep_dir: Path):
+    """Test that the correct export path for given inputs is constructed."""
+
+    # Build the fMRIPrep input image path
+    image_name = "sub-0_task-rest_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
+    fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep"
+    image_path = fmriprep_dir / "sub-0" / "func" / image_name
+
+    # Build the output path
+    subject_out_dir = clpipe_fmriprep_dir / "data_postproc2" / "sub-0"
+
+    # Build full export path
+    export_path = build_export_path(image_path, '0', fmriprep_dir, subject_out_dir)
+
+    assert str(export_path) == str(subject_out_dir / "func" / "sub-0_task-rest_space-MNI152NLin2009cAsym_desc-postproc_bold.nii.gz")
+
+
+def test_build_export_path_confounds(clpipe_fmriprep_dir: Path):
+    """Test that the correct export path for given inputs is constructed."""
+
+    # Build the fMRIPrep input confounds path
+    confounds_name = "sub-0_task-rest_desc-confounds_timeseries.tsv"
+    fmriprep_dir = clpipe_fmriprep_dir / "data_fmriprep" / "fmriprep"
+    confounds_path = fmriprep_dir / "sub-0" / "func" / confounds_name
+
+    # Build the output path
+    subject_out_dir = clpipe_fmriprep_dir / "data_postproc2" / "sub-0"
+
+    # Build full export path
+    export_path = build_export_path(confounds_path, '0', fmriprep_dir, subject_out_dir)
+
+    assert str(export_path) == str(subject_out_dir / "func" / "sub-0_task-rest_desc-confounds_timeseries.tsv")
+    
\ No newline at end of file

From f58c06f93a9fb8b1bd3f2fbb05aa15bc7f142be8 Mon Sep 17 00:00:00 2001
From: Will Asciutto
Date: Wed, 22 Feb 2023 15:39:38 -0500
Subject: [PATCH 82/82] Expose build_export_path as a public function so it
 can be tested.

---
 clpipe/fmri_postprocess2.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/clpipe/fmri_postprocess2.py b/clpipe/fmri_postprocess2.py
index c9db4945..9ee87062 100644
--- a/clpipe/fmri_postprocess2.py
+++ b/clpipe/fmri_postprocess2.py
@@ -371,7 +371,7 @@ def postprocess_image(
 
     if confounds_path is not None:
         try:
-            confounds_export_path = _build_export_path(
+            confounds_export_path = build_export_path(
                 confounds_path, query_params["subject"],
                 fmriprep_dir, subject_out_dir)
 
@@ -386,7 +386,7 @@ def postprocess_image(
             logger.warn("Skipping confounds processing")
 
     if not confounds_only:
-        image_export_path = _build_export_path(
+        image_export_path = build_export_path(
             image_path, query_params["subject"],
             fmriprep_dir, subject_out_dir)
 
         image_wf = _setup_image_workflow(
@@ -446,9 +446,9 @@ def _setup_image_workflow(
 
     return wf
 
-def _build_export_path(image_path: os.PathLike, subject_id: str,
+def build_export_path(image_path: os.PathLike, subject_id: str,
     fmriprep_dir: os.PathLike,
-    subject_out_dir: os.PathLike):
+    subject_out_dir: os.PathLike) -> Path:
     """Builds a new name for a processed image.
 
     Args: