From d68248fdfed22f551a2adecd40bdb7e631f20610 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 3 Aug 2023 16:00:41 -0400 Subject: [PATCH 01/54] Fixing registration and masks handling --- USAGE | 10 +-- main.nf | 22 +++-- modules/io.nf | 19 ++--- modules/tracking/processes/preprocess.nf | 17 +--- .../processes/registration_processes.nf | 84 +++++++------------ .../tracking/processes/tracking_processes.nf | 23 +---- modules/tracking/workflows/preprocessing.nf | 2 - modules/tracking/workflows/registration.nf | 11 +-- modules/tracking/workflows/tracking.nf | 4 +- nextflow.config | 2 +- 10 files changed, 64 insertions(+), 130 deletions(-) diff --git a/USAGE b/USAGE index 7482974..1b0c383 100644 --- a/USAGE +++ b/USAGE @@ -161,12 +161,10 @@ OPTIONAL ARGUMENTS (current value) --manual_frf FRF set manually (--manual_frf "$manual_frf") SEEDING AND TRAKING OPTIONS - --use_brain_mask_as_tracking_mask If selected, will use the complete brain mask (including GM, CSF and WM) as a tracking mask. - Be careful when examining your results, if the hemispheres are not properly separated by the mask, - streamlines could connect both hemisphere in the superior regions. Default is false and WM mask - is used as a tracking mask. ($use_brain_mask_as_tracking_mask) - --fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding and tracking. - ($fa_seeding_mask_thr) + --fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding and + tracking. ($fa_seeding_mask_thr) + --erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($erosion) --algo Tracking algorithm [prob, det]. ($algo) --nb_seeds Number of seeds related to the seeding type param. ($nb_seeds) --seeding Seeding type [npv, nt]. 
($seeding) diff --git a/main.nf b/main.nf index af09bf8..30e4422 100644 --- a/main.nf +++ b/main.nf @@ -27,7 +27,6 @@ workflow { PREPROCESSING(data.dwi, data.rev, data.anat, - data.brain_mask, data.wm_mask) DTI(PREPROCESSING.out.dwi_bval_bvec, @@ -37,12 +36,11 @@ workflow { SH(PREPROCESSING.out.dwi_bval_bvec) } - fa_channel = DTI.out.fa_and_md - .map{[it[0], it[1]]} + md_channel = DTI.out.fa_and_md + .map{ [it[0], it[2]]} - REGISTRATION(PREPROCESSING.out.dwi_bval_bvec, - PREPROCESSING.out.t2w_and_mask, - fa_channel) + REGISTRATION(md_channel, + PREPROCESSING.out.t2w_and_mask) b0_mask_channel = PREPROCESSING.out.b0_and_mask .map{[it[0], it[2]]} @@ -50,11 +48,11 @@ workflow { FODF(PREPROCESSING.out.dwi_bval_bvec, b0_mask_channel, DTI.out.fa_and_md) - - masks_channel = REGISTRATION.out.warped_anat - .map{[it[0], it[2], it[3]]} - TRACKING(masks_channel, + fa_channel = DTI.out.fa_and_md + .map{[it[0], it[1]]} + + TRACKING(REGISTRATION.out.warped_anat, FODF.out.fodf, fa_channel) } @@ -166,7 +164,7 @@ def display_usage () { "sfthres":"$params.sfthres", "min_len":"$params.min_len", "max_len":"$params.max_len", - "use_brain_mask_as_tracking_mask":"$params.use_brain_mask_as_tracking_mask", + "erosion":"$params.erosion", "compress_value":"$params.compress_value", "output_dir":"$params.output_dir", "processes_denoise_dwi":"$params.processes_denoise_dwi", @@ -285,6 +283,7 @@ def display_run_info () { log.info "" log.info "SEEDING AND TRACKING OPTIONS" log.info "FA threshold for seeding mask: $params.fa_seeding_mask_thr" + log.info "Erosion value to apply on brain mask: $params.erosion" log.info "Algorithm for tracking: $params.algo" log.info "Number of seeds per voxel: $params.nb_seeds" log.info "Seeding method: $params.seeding" @@ -294,7 +293,6 @@ def display_run_info () { log.info "Minimum fiber length: $params.min_len" log.info "Maximum fiber length: $params.max_len" log.info "Random tracking seed: $params.tracking_seed" - log.info "Use brain mask for tracking: 
$params.use_brain_mask_as_tracking_mask" log.info "Compression value: $params.compress_value" log.info "" log.info "PROCESSES PER TASKS" diff --git a/modules/io.nf b/modules/io.nf index 3618743..d6b4e63 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -24,16 +24,14 @@ workflow get_data_tracking { log.info " | ├-- *bval" log.info " | ├-- *bvec" log.info " | ├-- *revb0.nii.gz" - log.info " | ├-- *T2w.nii.gz" - log.info " | ├-- *brain_mask.nii.gz" + log.info " | ├-- *t2w.nii.gz" log.info " | └-- *wm_mask.nii.gz" log.info " └-- S2" log.info " ├-- *dwi.nii.gz" log.info " ├-- *bval" log.info " ├-- *bvec" log.info " ├-- *revb0.nii.gz" - log.info " ├-- *T2w.nii.gz" - log.info " ├-- *brain_mask.nii.gz" + log.info " ├-- *t2w.nii.gz" log.info " └-- *wm_mask.nii.gz" error "Please resubmit your command with the previous file structure." } @@ -47,8 +45,6 @@ workflow get_data_tracking { { fetch_id(it.parent, input) } anat_channel = Channel.fromFilePairs("$input/**/*t2w.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - brain_mask_channel = Channel.fromFilePairs("$input/**/*brain_mask.nii.gz", size: 1, flat: true) - { fetch_id(it.parent, input) } wm_mask_channel = Channel.fromFilePairs("$input/**/*wm_mask.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } @@ -59,7 +55,6 @@ workflow get_data_tracking { dwi = dwi_channel rev = rev_channel anat = anat_channel - brain_mask = brain_mask_channel wm_mask = wm_mask_channel } @@ -80,8 +75,7 @@ workflow get_data_connectomics { log.info " | ├-- *peaks.nii.gz" log.info " | ├-- *fodf.nii.gz" log.info " | ├-- OGenericAffine.mat" - log.info " | ├-- synoutput0Warp.nii.gz" - log.info " | ├-- maskoutput0Warp.nii.gz" + log.info " | ├-- output1Warp.nii.gz" log.info " | └-- metrics" log.info " | └-- METRIC_NAME.nii.gz [Optional]" log.info " └-- S2" @@ -94,8 +88,7 @@ workflow get_data_connectomics { log.info " ├-- *peaks.nii.gz" log.info " ├-- *fodf.nii.gz" log.info " ├-- OGenericAffine.mat" - log.info " ├-- 
synoutput0Warp.nii.gz" - log.info " ├-- maskoutput0Warp.nii.gz" + log.info " ├-- output1Warp.nii.gz" log.info " └-- metrics" log.info " └-- METRIC_NAME.nii.gz [Optional]" error "Please resubmit your command with the previous file structure." @@ -116,14 +109,14 @@ workflow get_data_connectomics { { it.parent.parent.name } t2w_channel = Channel.fromFilePairs("$input/**/*t2w_warped.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,synoutput0Warp.nii.gz,maskoutput0Warp.nii.gz}", size: 3, flat: true) + transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,output1Warp.nii.gz}", size: 2, flat: true) { fetch_id(it.parent, input) } // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. dwi_peaks_channel = dwi_peaks_channel.map{sid, bvals, bvecs, dwi, peaks -> tuple(sid, dwi, bvals, bvecs, peaks)} // Setting up transfos channel in this order : sid, affine, syn, masksyn - transfos_channel = transfos_channel.map{sid, affine, masksyn, syn -> tuple(sid, affine, syn, masksyn)} + // transfos_channel = transfos_channel.map{sid, affine, masksyn, syn -> tuple(sid, affine, syn, masksyn)} // Flattening metrics channel. 
metrics_channel = metrics_channel.transpose().groupTuple() diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 4f21a03..59ac167 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -187,10 +187,9 @@ process CROP_ANAT { cpus 1 input: - tuple val(sid), path(t2w), path(brain_mask), path(wm_mask) + tuple val(sid), path(t2w), path(wm_mask) output: - tuple val(sid), path("${sid}__t2w_cropped.nii.gz"), path("${sid}__brain_mask_cropped.nii.gz"), - path("${sid}__wm_mask_cropped.nii.gz"), emit: cropped_t2w_and_mask + tuple val(sid), path("${sid}__t2w_cropped.nii.gz"), path("${sid}__wm_mask_cropped.nii.gz"), emit: cropped_t2w_and_mask script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -198,8 +197,6 @@ process CROP_ANAT { export OPENBLAS_NUM_THREADS=1 scil_crop_volume.py $t2w ${sid}__t2w_cropped.nii.gz\ --output_bbox t2w_boundingBox.pkl -f - scil_crop_volume.py $brain_mask ${sid}__brain_mask_cropped.nii.gz\ - --input_bbox t2w_boundingBox.pkl -f scil_crop_volume.py $wm_mask ${sid}__wm_mask_cropped.nii.gz\ --input_bbox t2w_boundingBox.pkl -f """ @@ -210,10 +207,9 @@ process RESAMPLE_ANAT { cpus 1 input: - tuple val(sid), path(t2w), path(brain_mask), path(wm_mask) + tuple val(sid), path(t2w), path(wm_mask) output: - tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__brain_mask_resampled.nii.gz"), - path("${sid}__wm_mask_resampled.nii.gz"), emit: t2w_and_mask + tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__wm_mask_resampled.nii.gz"), emit: t2w_and_mask script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -221,11 +217,6 @@ process RESAMPLE_ANAT { export OPENBLAS_NUM_THREADS=1 scil_resample_volume.py $t2w ${sid}__t2w_resampled.nii.gz\ --voxel_size $params.t2w_resolution --interp $params.t2w_interpolation -f - scil_resample_volume.py $brain_mask ${sid}__brain_mask_resampled.nii.gz\ - --voxel_size $params.t2w_resolution --interp 
$params.mask_interpolation\ - -f - scil_image_math.py convert ${sid}__brain_mask_resampled.nii.gz ${sid}__brain_mask_resampled.nii.gz\ - --data_type uint8 -f scil_resample_volume.py $wm_mask ${sid}__wm_mask_resampled.nii.gz\ --voxel_size $params.t2w_resolution --interp $params.mask_interpolation\ -f diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index e7ccf08..fd942fb 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -7,74 +7,48 @@ process REGISTER_ANAT { cpus params.processes_registration input: - tuple val(sid), path(dwi), path(bval), path(t2w), path(brain_mask), path(wm_mask), - path(fa) + tuple val(sid), path(md), path(t2w), path(wm_mask) output: - tuple val(sid), path("${sid}__pwd_avg.nii.gz"), emit: pwd_avg tuple val(sid), path("${sid}__t2w_warped.nii.gz"), - path("${sid}__brain_mask_warped.nii.gz"), path("${sid}__wm_mask_warped.nii.gz"), emit: warped_anat tuple val(sid), path("output0GenericAffine.mat"), - path("synoutput0Warp.nii.gz"), - path("maskoutput0Warp.nii.gz"), emit: transfos + path("output1Warp.nii.gz"), emit: transfos tuple val(sid), - path("outputInverseWarped.nii.gz"), - path("synoutput0InverseWarp.nii.gz"), - path("maskoutput0InverseWarp.nii.gz"), emit: inverse_transfo + path("outputInverseWarped.nii.gz"), emit: inverse_transfo script: - //** For some reason, mapping of the masks isn't as good as the t2w, performing a final SyN registration **// - //** to fit the brain mask properly. **// + // ** Registration from t2w to diff space in infant returns better result when using the MD map due ** // + // ** to similar intensities (white = CSF in both volumes). See: Uus A, Pietsch M, Grigorescu I, ** // + // ** Christiaens D, Tournier JD, Grande LC, Hutter J, Edwards D, Hajnal J, Deprez M. Multi-channel ** // + // ** Registration for Diffusion MRI: Longitudinal Analysis for the Neonatal Brain. 
Biomedical Image ** // + // ** Registration. 2020 May 13;12120:111–21. doi: 10.1007/978-3-030-50120-4_11. PMCID: PMC7279935. ** // """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 export ANTS_RANDOM_SEED=1234 - scil_compute_powder_average.py $dwi $bval ${sid}__pwd_avg.nii.gz\ - --b0_thr $params.b0_thr -f - antsRegistration --dimensionality 3 --float 0\ - --output [output,outputWarped.nii.gz,outputInverseWarped.nii.gz]\ - --interpolation Linear --use-histogram-matching 0\ - --winsorize-image-intensities [0.005,0.995]\ - --initial-moving-transform [${sid}__pwd_avg.nii.gz,$t2w,1]\ - --transform Rigid['0.2']\ - --metric MI[${sid}__pwd_avg.nii.gz,$t2w,1,32,Regular,0.25]\ - --convergence [500x250x125x50,1e-6,10] --shrink-factors 8x4x2x1\ - --smoothing-sigmas 3x2x1x0\ - --transform Affine['0.2']\ - --metric MI[${sid}__pwd_avg.nii.gz,$t2w,1,32,Regular,0.25]\ - --convergence [500x250x125x50,1e-6,10] --shrink-factors 8x4x2x1\ - --smoothing-sigmas 3x2x1x0\ - --verbose 1 - mv outputWarped.nii.gz ${sid}__t2w_affine_warped.nii.gz - antsRegistration --dimensionality 3 --float 0\ - --output [synoutput,synoutputWarped.nii.gz,synoutputInverseWarped.nii.gz]\ - --interpolation Linear --use-histogram-matching 0\ - --winsorize-image-intensities [0.005,0.995]\ - --transform SyN[0.1,3,0]\ - --metric MI[${sid}__pwd_avg.nii.gz,$t2w,1,32]\ - --metric CC[$fa,$t2w,1,4]\ - --convergence [200x150x200,1e-6,10] --shrink-factors 4x2x1\ - --smoothing-sigmas 3x2x1\ - --verbose 1 - mv synoutputWarped.nii.gz ${sid}__t2w_warped.nii.gz - antsApplyTransforms -d 3 -i $brain_mask -r ${sid}__pwd_avg.nii.gz\ - -o ${sid}__brain_mask_initial_warp.nii.gz -n NearestNeighbor\ - -t synoutput0Warp.nii.gz output0GenericAffine.mat -u int - antsRegistration -d 3 --float 0\ - --output [maskoutput,maskoutputWarped.nii.gz,maskoutputInverseWarped.nii.gz]\ - --interpolation NearestNeighbor --use-histogram-matching 0\ - --winsorize-image-intensities 
[0.005,0.995]\ - --transform SyN[0.1,3,0]\ - --metric MI[${sid}__pwd_avg.nii.gz,${sid}__brain_mask_initial_warp.nii.gz]\ - --convergence [500x250x125,1e-6,10] --shrink-factors 4x2x1\ - --smoothing-sigmas 3x2x1\ - --verbose 1 - mv maskoutputWarped.nii.gz ${sid}__brain_mask_warped.nii.gz - antsApplyTransforms -d 3 -i $wm_mask -r ${sid}__pwd_avg.nii.gz\ - -o ${sid}__wm_mask_warped.nii.gz -n NearestNeighbor\ - -t maskoutput0Warp.nii.gz synoutput0Warp.nii.gz output0GenericAffine.mat -u int + antsRegistration --verbose 1 --dimensionality 3 --float 0 \ + --collapse-output-transforms 1 \ + --output [ output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ + --interpolation Linear --use-histogram-matching 0 \ + --winsorize-image-intensities [ 0.005,0.995 ] \ + --initial-moving-transform [ $md,$t2w,1 ] \ + --transform Rigid[ 0.1 ] \ + --metric MI[ $md,$t2w,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform Affine[ 0.1 ] --metric MI[ $md,$t2w,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform SyN[ 0.1,3,0 ] \ + --metric CC[ $md,$t2w,1,4 ] \ + --convergence [ 200x150x200x200,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox + mv outputWarped.nii.gz ${sid}__t2w_warped.nii.gz + antsApplyTransforms -d 3 -i $wm_mask -r ${sid}__t2w_warped.nii.gz \ + -o ${sid}__wm_mask_warped.nii.gz -n NearestNeighbor \ + -t output1Warp.nii.gz output0GenericAffine.mat -u int """ } \ No newline at end of file diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 1ead6bc..15e7aa0 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -7,7 +7,7 @@ process GENERATE_MASKS { cpus 1 input: - tuple val(sid), path(brain_mask), path(wm_mask), path(fa) + tuple val(sid), path(t2w), 
path(wm_mask), path(fa) output: tuple val(sid), path("${sid}__wm_mask_final.nii.gz"), path("${sid}__brain_mask.nii.gz"), emit: masks @@ -17,12 +17,12 @@ process GENERATE_MASKS { export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 mrthreshold $fa ${sid}__fa_mask.nii.gz -abs $params.fa_seeding_mask_thr -nthreads 1 -force + scil_image_math.py lower_threshold $t2w 0 brain_mask.nii.gz --data_type uint8 + scil_image_math.py erosion brain_mask.nii.gz $params.erosion ${sid}__brain_mask.nii.gz --data_type uint8 -f scil_image_math.py union ${sid}__fa_mask.nii.gz $wm_mask\ wm_mask_temp.nii.gz --data_type uint8 -f - scil_image_math.py intersection wm_mask_temp.nii.gz $brain_mask\ + scil_image_math.py intersection wm_mask_temp.nii.gz brain_mask.nii.gz\ ${sid}__wm_mask_final.nii.gz --data_type uint8 -f - scil_image_math.py convert $brain_mask ${sid}__brain_mask.nii.gz\ - --data_type uint8 -f """ } @@ -35,21 +35,6 @@ process LOCAL_TRACKING { output: tuple val(sid), path("${sid}__local_tracking.trk"), emit: tractogram script: - if (params.use_brain_mask_as_tracking_mask) - """ - export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 - export OMP_NUM_THREADS=1 - export OPENBLAS_NUM_THREADS=1 - scil_compute_local_tracking.py $fodf $wm_mask $brain_mask\ - tmp.trk --algo $params.algo --$params.seeding $params.nb_seeds\ - --seed $params.tracking_seed --step $params.step_size --theta $params.theta\ - --sfthres $params.sfthres --min_length $params.min_len\ - --max_length $params.max_len --sh_basis $params.sh_fitting_basis\ - --compress $params.compress_value - scil_remove_invalid_streamlines.py tmp.trk\ - ${sid}__local_tracking.trk --remove_single_point - """ - else """ scil_compute_local_tracking.py $fodf $wm_mask $wm_mask\ tmp.trk --algo $params.algo --$params.seeding $params.nb_seeds\ diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 26e3c5b..ea5a2cd 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ 
b/modules/tracking/workflows/preprocessing.nf @@ -22,7 +22,6 @@ workflow PREPROCESSING { dwi_channel rev_channel anat_channel - brain_mask_channel wm_mask_channel main: @@ -64,7 +63,6 @@ workflow PREPROCESSING { } anat_crop_channel = anat_crop_channel - .combine(brain_mask_channel, by: 0) .combine(wm_mask_channel, by:0) CROP_ANAT(anat_crop_channel) diff --git a/modules/tracking/workflows/registration.nf b/modules/tracking/workflows/registration.nf index 79c0177..2818ef3 100644 --- a/modules/tracking/workflows/registration.nf +++ b/modules/tracking/workflows/registration.nf @@ -8,15 +8,12 @@ include { workflow REGISTRATION { take: - dwi_channel + md_channel t2w_and_mask - fa_channel main: - - register_channel = dwi_channel - .map{[it[0], it[1], it[2]]} - .combine(t2w_and_mask, by: 0) - .combine(fa_channel, by: 0) + + register_channel = t2w_and_mask + .combine(md_channel, by: 0) REGISTER_ANAT(register_channel) diff --git a/modules/tracking/workflows/tracking.nf b/modules/tracking/workflows/tracking.nf index 890564e..4c5373c 100644 --- a/modules/tracking/workflows/tracking.nf +++ b/modules/tracking/workflows/tracking.nf @@ -9,11 +9,11 @@ include { workflow TRACKING { take: - brain_wm_mask_channel + t2_and_mask_channel fodf_channel fa_channel main: - masks_channel = brain_wm_mask_channel + masks_channel = t2_and_mask_channel .combine(fa_channel, by: 0) GENERATE_MASKS(masks_channel) diff --git a/nextflow.config b/nextflow.config index 381d70a..4559d55 100644 --- a/nextflow.config +++ b/nextflow.config @@ -87,7 +87,7 @@ params { min_len = 10 max_len = 200 tracking_seed = 0 - use_brain_mask_as_tracking_mask = false + erosion = 4 compress_value = 0.2 // Processes per tasks From 48c4fab191055b932467c67ddaaad1daa0b30744 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 8 Aug 2023 09:57:03 -0400 Subject: [PATCH 02/54] fix transform labels and minor changes --- USAGE | 6 +++--- main.nf | 1 + 
modules/connectomics/processes/transform_labels.nf | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/USAGE b/USAGE index 1b0c383..f406b69 100644 --- a/USAGE +++ b/USAGE @@ -46,10 +46,10 @@ DESCRIPTION [Input] ├-- S1 | ├-- *dwi.nii.gz [Required for all profiles] - | ├-- *.bval [Required for all profiles] - | ├-- *.bvec [Required for all profiles] + | ├-- *.bval [Required for all profiles] + | ├-- *.bvec [Required for all profiles] | ├-- *revb0.nii.gz [Required only for tracking] - | ├-- *t2w_warped.nii.gz [Required for all profiles] + | ├-- *t2w.nii.gz [Required for all profiles] | ├-- *brain_mask.nii.gz [Required only for tracking] | ├-- *wm_mask.nii.gz [Required only for tracking] | ├-- *.trk [Required only for connectomics] diff --git a/main.nf b/main.nf index 30e4422..023b4ba 100644 --- a/main.nf +++ b/main.nf @@ -61,6 +61,7 @@ workflow { tracking = TRACKING.out.trk // ** Labels needs to be provided as an input, since they are not computed at some point in the pipeline ** // + input = file(params.input) labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } diff --git a/modules/connectomics/processes/transform_labels.nf b/modules/connectomics/processes/transform_labels.nf index 2677e05..6de3dde 100644 --- a/modules/connectomics/processes/transform_labels.nf +++ b/modules/connectomics/processes/transform_labels.nf @@ -8,13 +8,13 @@ process TRANSFORM_LABELS { label "TRANSFORM_LABELS" input: - tuple val(sid), path(labels), path(t2), path(mat), path(syn), path(masksyn) + tuple val(sid), path(labels), path(t2), path(mat), path(syn) output: tuple val(sid), path("${sid}__labels_warped.nii.gz"), emit: labels_warped script: """ antsApplyTransforms -d 3 -i $labels -r $t2 -o ${sid}__labels_warped.nii.gz \ - -t $masksyn $syn $mat -n NearestNeighbor + -t $syn $mat -n NearestNeighbor scil_image_math.py convert ${sid}__labels_warped.nii.gz ${sid}__labels_warped.nii.gz \ --data_type int16 -f """ From 
db3416321eda5af3f699556f5ecd86063f3b692e Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 8 Aug 2023 12:17:23 -0400 Subject: [PATCH 03/54] fix registration channel --- modules/tracking/workflows/registration.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/tracking/workflows/registration.nf b/modules/tracking/workflows/registration.nf index 2818ef3..b2f1d74 100644 --- a/modules/tracking/workflows/registration.nf +++ b/modules/tracking/workflows/registration.nf @@ -12,8 +12,8 @@ workflow REGISTRATION { t2w_and_mask main: - register_channel = t2w_and_mask - .combine(md_channel, by: 0) + register_channel = md_channel + .combine(t2w_and_mask, by: 0) REGISTER_ANAT(register_channel) From e51b83651e4587100707d9f354ea1cb1f336defd Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 8 Aug 2023 14:18:03 -0400 Subject: [PATCH 04/54] fix typo --- modules/tracking/processes/tracking_processes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 15e7aa0..4263e8f 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -21,7 +21,7 @@ process GENERATE_MASKS { scil_image_math.py erosion brain_mask.nii.gz $params.erosion ${sid}__brain_mask.nii.gz --data_type uint8 -f scil_image_math.py union ${sid}__fa_mask.nii.gz $wm_mask\ wm_mask_temp.nii.gz --data_type uint8 -f - scil_image_math.py intersection wm_mask_temp.nii.gz brain_mask.nii.gz\ + scil_image_math.py intersection wm_mask_temp.nii.gz ${sid}__brain_mask.nii.gz\ ${sid}__wm_mask_final.nii.gz --data_type uint8 -f """ } From 473edd867fce7db9d34a19139c6507dc6035874a Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:28:12 -0400 Subject: [PATCH 
05/54] Fix USAGE and labels registration --- USAGE | 6 ++++-- main.nf | 3 +-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/USAGE b/USAGE index f406b69..31ba7e2 100644 --- a/USAGE +++ b/USAGE @@ -49,7 +49,8 @@ DESCRIPTION | ├-- *.bval [Required for all profiles] | ├-- *.bvec [Required for all profiles] | ├-- *revb0.nii.gz [Required only for tracking] - | ├-- *t2w.nii.gz [Required for all profiles] + | ├-- *t2w.nii.gz [Required only for tracking] + | |-- *t2w_warped.nii.gz [Required only for connectomics] | ├-- *brain_mask.nii.gz [Required only for tracking] | ├-- *wm_mask.nii.gz [Required only for tracking] | ├-- *.trk [Required only for connectomics] @@ -66,7 +67,8 @@ DESCRIPTION ├-- *bval [Required for all profiles] ├-- *bvec [Required for all profiles] ├-- *revb0.nii.gz [Required only for tracking] - ├-- *t2w.nii.gz [Required for all profiles] + ├-- *t2w.nii.gz [Required only for tracking] + |-- *t2w_warped.nii.gz [Required only for connectomics] ├-- *brain_mask.nii.gz [Required only for tracking] ├-- *wm_mask.nii.gz [Required only for tracking] ├-- *.trk [Required only for connectomics] diff --git a/main.nf b/main.nf index 023b4ba..af66850 100644 --- a/main.nf +++ b/main.nf @@ -78,8 +78,7 @@ workflow { metrics = provided_metrics .combine(def_metrics, by: 0) - t2w = PREPROCESSING.out.t2w_and_mask - .map{ [it[0], it[1]] } + t2w = REGISTRATION.out.warped_anat transfos = REGISTRATION.out.transfos From 0b61008ad62ec89ea35eeb4adc1ea5096312a357 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Fri, 15 Sep 2023 15:24:34 -0400 Subject: [PATCH 06/54] initial move for a more versatile pipeline --- USAGE | 57 +++-- main.nf | 127 +++++++---- modules/connectomics/processes/commit.nf | 38 +++- .../connectomics/workflows/connectomics.nf | 37 +++- modules/tracking/processes/FODF_processes.nf | 2 +- modules/tracking/processes/preprocess.nf | 105 ++++++++- .../processes/registration_processes.nf | 59 ++++- 
.../tracking/processes/tracking_processes.nf | 207 ++++++++++++++++-- modules/tracking/workflows/preprocessing.nf | 2 +- modules/tracking/workflows/registration.nf | 34 ++- modules/tracking/workflows/tracking.nf | 70 +++++- nextflow.config | 127 ++++++++--- 12 files changed, 716 insertions(+), 149 deletions(-) diff --git a/USAGE b/USAGE index 31ba7e2..947d3c9 100644 --- a/USAGE +++ b/USAGE @@ -94,11 +94,11 @@ OPTIONAL ARGUMENTS (current value) --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) BET T2 OPTIONS - --run_bet_t2w If set, will perform brain extraction on the input T2w volume. ($run_bet_t2w) + --run_bet_anat If set, will perform brain extraction on the input T2w volume. ($run_bet_anat) Default settings are soft to make sure an already brain extracted volume is not impacted by the bet command. The goal is to clean volumes that still have portions of non-brain structures. - --bet_t2w_f Fractional intensity threshold for bet. ($bet_t2w_f) + --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) EDDY AND TOPUP OPTIONS --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) @@ -112,12 +112,11 @@ OPTIONAL ARGUMENTS (current value) --fa_mask_threshold Threshold to use when creating the fa mask for normalization. ($fa_mask_threshold) RESAMPLE OPTIONS - --t2w_resolution Resampling resolution of the T2w image. ($t2w_resolution) - --t2w_interpolation Interpolation method to use after resampling. ($t2w_interpolation) + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) --mask_interpolation Interpolation method to use on the anatomical masks after resampling. ($mask_interpolation) --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) --dwi_interpolation Interpolation method to use after resampling of the dwi volume. 
($dwi_interpolation) - --mask_dwi_interpolation Interpolation method to use on the b0 mask after resampling. ($mask_dwi_interpolation) DTI OPTIONS --max_dti_shell_value Maximum b-value threshold to select DTI shells. (b <= $max_dti_shell_value) @@ -162,20 +161,40 @@ OPTIONAL ARGUMENTS (current value) --set_frf If selected, will manually set the frf. ($set_frf) --manual_frf FRF set manually (--manual_frf "$manual_frf") - SEEDING AND TRAKING OPTIONS - --fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding and - tracking. ($fa_seeding_mask_thr) - --erosion Number of voxel to remove from brain mask. Use to remove aberrant - voxel in fa maps. ($erosion) - --algo Tracking algorithm [prob, det]. ($algo) - --nb_seeds Number of seeds related to the seeding type param. ($nb_seeds) - --seeding Seeding type [npv, nt]. ($seeding) - --step_size Step size ($step_size) - --theta Maximum angle between 2 steps. ($theta) - --min_len Minimum length for a streamline. ($min_len) - --max_len Maximum length for a streamline. ($max_len) - --compress_value Compression error threshold. ($compress_value) - --tracking_seed List of random seed numbers for the random number generator. ($tracking_seed) + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. 
($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nb_seeds Number of seeds related to the seeding type param. ($local_nb_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nb_seeds Number of seeds related to the seeding type param. ($pft_nb_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. ($pft_random_seed) Please write them as a list separated by commas without space e.g. 
(--tracking_seed 1,2,3) PROCESSES OPTIONS diff --git a/main.nf b/main.nf index af66850..d204a9a 100644 --- a/main.nf +++ b/main.nf @@ -36,11 +36,9 @@ workflow { SH(PREPROCESSING.out.dwi_bval_bvec) } - md_channel = DTI.out.fa_and_md - .map{ [it[0], it[2]]} - - REGISTRATION(md_channel, - PREPROCESSING.out.t2w_and_mask) + REGISTRATION(DTI.out.fa_and_md, + PREPROCESSING.out.t2w_and_mask, + PREPROCESSING.out.b0_and_mask.map{ [it[0], it[1]] }) b0_mask_channel = PREPROCESSING.out.b0_and_mask .map{[it[0], it[2]]} @@ -75,10 +73,15 @@ workflow { def_metrics = DTI.out.fa_and_md .combine(DTI.out.ad_and_rd, by: 0) .combine(FODF.out.afd_and_nufo, by: 0) - metrics = provided_metrics - .combine(def_metrics, by: 0) + metrics = def_metrics + .combine(provided_metrics, by: 0) + + // ** Flattening metrics channel ** // + metrics = metrics.transpose().groupTuple() + .flatMap{ sid, metrics -> metrics.collect{ [sid, it] } } t2w = REGISTRATION.out.warped_anat + .map{ [it[0], it[1]] } transfos = REGISTRATION.out.transfos @@ -120,8 +123,8 @@ def display_usage () { bindings = ["b0_thr":"$params.b0_thr", "initial_bet_f":"$params.initial_bet_f", "final_bet_f":"$params.final_bet_f", - "run_bet_t2w":"$params.run_bet_t2w", - "bet_t2w_f":"$params.bet_t2w_f", + "run_bet_anat":"$params.run_bet_anat", + "bet_anat_f":"$params.bet_anat_f", "topup_config":"$params.topup_config", "encoding_direction":"$params.encoding_direction", "readout":"$params.readout", @@ -131,17 +134,18 @@ def display_usage () { "use_slice_drop_correction":"$params.use_slice_drop_correction", "dwi_shell_tolerance":"$params.dwi_shell_tolerance", "fa_mask_threshold":"$params.fa_mask_threshold", - "t2w_resolution":"$params.t2w_resolution", - "t2w_interpolation":"$params.t2w_interpolation", + "anat_resolution":"$params.anat_resolution", + "anat_interpolation":"$params.anat_interpolation", "mask_interpolation":"$params.mask_interpolation", + "template_t1":"$params.template_t1", "dwi_resolution":"$params.dwi_resolution", 
"dwi_interpolation":"$params.dwi_interpolation", - "mask_dwi_interpolation":"$params.mask_dwi_interpolation", "max_dti_shell_value":"$params.max_dti_shell_value", "sh_fitting":"$params.sh_fitting", "sh_fitting_order":"$params.sh_fitting_order", "sh_fitting_basis":"$params.sh_fitting_basis", "min_fodf_shell_value":"$params.min_fodf_shell_value", + "fodf_metrics_a_facotr":"$params.fodf_metrics_a_factor", "max_fa_in_ventricle":"$params.max_fa_in_ventricle", "min_md_in_ventricle":"$params.min_md_in_ventricle", "relative_threshold":"$params.relative_threshold", @@ -154,20 +158,46 @@ def display_usage () { "roi_radius":"$params.roi_radius", "set_frf":"$params.set_frf", "manual_frf":"$params.manual_frf", - "fa_seeding_mask_thr":"$params.fa_seeding_mask_thr", - "algo":"$params.algo", - "seeding":"$params.seeding", - "nb_seeds":"$params.nb_seeds", - "tracking_seed":"$params.tracking_seed", - "step_size":"$params.step_size", - "theta":"$params.theta", - "sfthres":"$params.sfthres", - "min_len":"$params.min_len", - "max_len":"$params.max_len", - "erosion":"$params.erosion", - "compress_value":"$params.compress_value", + "run_pft_tracking":"$params.run_pft_tracking", + "pft_compress_streamlines":"$params.pft_compress_streamlines", + "pft_seeding_mask_type":"$params.pft_seeding_mask_type", + "pft_fa_seeding_mask_thr":"$params.pft_fa_seeding_mask_thr", + "pft_algo":"$params.pft_algo", + "pft_nb_seeds":"$params.pft_nb_seeds", + "pft_seeding":"$params.pft_seeding", + "pft_step_size":"$params.pft_step_size", + "pft_theta":"$params.pft_theta", + "pft_sfthres":"$params.pft_sfthres", + "pft_sfthres_init":"$params.pft_sfthres_init", + "pft_min_len":"$params.pft_min_len", + "pft_max_len":"$params.pft_max_len", + "pft_particles":"$params.pft_particles", + "pft_back":"$params.pft_back", + "pft_front":"$params.pft_front", + "pft_compress_value":"$params.pft_compress_value", + "pft_random_seed":"$params.pft_random_seed", + "run_local_tracking":"$params.run_local_tracking", + 
"local_compress_streamlines":"$params.local_compress_streamlines", + "local_fa_seeding_mask_thr":"$params.local_fa_seeding_mask_thr", + "local_seeding_mask_type":"$params.local_seeding_mask_type", + "local_fa_tracking_mask_thr":"$params.local_fa_tracking_mask_thr", + "local_tracking_mask_type":"$params.local_tracking_mask_type", + "local_algo":"$params.local_algo", + "local_seeding":"$params.local_seeding", + "local_nb_seeds":"$params.local_nb_seeds", + "local_tracking_seed":"$params.local_tracking_seed", + "local_step_size":"$params.local_step_size", + "local_theta":"$params.local_theta", + "local_sfthres":"$params.local_sfthres", + "local_sfthres_init":"$params.local_sfthres_init", + "local_min_len":"$params.local_min_len", + "local_max_len":"$params.local_max_len", + "local_erosion":"$params.local_erosion", + "local_compress_value":"$params.local_compress_value", "output_dir":"$params.output_dir", "processes_denoise_dwi":"$params.processes_denoise_dwi", + "processes_denoise_t1":"$params.processes_denoise_t1", + "processes_bet_t1":"$params.processes_bet_t1", "processes_eddy":"$params.processes_eddy", "processes_registration":"$params.processes_registration", "processes_fodf":"$params.processes_fodf", @@ -178,8 +208,13 @@ def display_usage () { "max_length":"$params.max_length", "loop_max_angle":"$params.loop_max_angle", "outlier_threshold":"$params.outlier_threshold", + "run_commit":"$params.run_commit", + "use_commit2":"$params.use_commit2", + "b_thr":"$params.b_thr", + "ball_stick":"$params.ball_stick", "nbr_dir":"$params.nbr_dir", "para_diff":"$params.para_diff", + "perp_diff":"$params.perp_diff", "iso_diff":"$params.iso_diff", "processes_commit":"$params.processes_commit", "processes_afd_fixel":"$params.processes_afd_fixel", @@ -231,8 +266,8 @@ def display_run_info () { log.info "Finale fractional value for BET: $params.final_bet_f" log.info "" log.info "BET T2W OPTIONS" - log.info "Run BET on T2W image: $params.run_bet_t2w" - log.info "Fractional value for 
T2W BET: $params.bet_t2w_f" + log.info "Run BET on T2W image: $params.run_bet_anat" + log.info "Fractional value for T2W BET: $params.bet_anat_f" log.info "" log.info "EDDY AND TOPUP OPTIONS" log.info "Configuration for topup: $params.topup_config" @@ -247,8 +282,8 @@ def display_run_info () { log.info "FA threshold for masking: $params.fa_mask_threshold" log.info "" log.info "RESAMPLE ANAT OPTIONS" - log.info "Resampling resolution for T2W: $params.t2w_resolution" - log.info "Interpolation method for T2W: $params.t2w_interpolation" + log.info "Resampling resolution for Anatomical file: $params.anat_resolution" + log.info "Interpolation method for Anatomical file: $params.anat_interpolation" log.info "Interpolation method for masks: $params.mask_interpolation" log.info "" log.info "RESAMPLE DWI OPTIONS" @@ -282,18 +317,30 @@ def display_run_info () { log.info "Manual FRF: $params.manual_frf" log.info "" log.info "SEEDING AND TRACKING OPTIONS" - log.info "FA threshold for seeding mask: $params.fa_seeding_mask_thr" - log.info "Erosion value to apply on brain mask: $params.erosion" - log.info "Algorithm for tracking: $params.algo" - log.info "Number of seeds per voxel: $params.nb_seeds" - log.info "Seeding method: $params.seeding" - log.info "Step size: $params.step_size" - log.info "Theta threshold: $params.theta" - log.info "Spherical function relative threshold: $params.sfthres" - log.info "Minimum fiber length: $params.min_len" - log.info "Maximum fiber length: $params.max_len" - log.info "Random tracking seed: $params.tracking_seed" - log.info "Compression value: $params.compress_value" + log.info "Local tracking : $params.run_local_tracking" + log.info "PFT tracking: $params.run_pft_tracking" + + if ( params.run_pft_tracking ) { + log.info "Algorithm for tracking: $params.pft_algo" + log.info "Number of seeds per voxel: $params.pft_nb_seeds" + log.info "Seeding method: $params.pft_seeding" + log.info "Step size: $params.pft_step_size" + log.info "Theta 
threshold: $params.pft_theta" + log.info "Minimum fiber length: $params.pft_min_len" + log.info "Maximum fiber length: $params.pft_max_len" + log.info "Compression: $params.pft_compress_streamlines" + } + else { + log.info "Algorithm for tracking: $params.local_algo" + log.info "Number of seeds per voxel: $params.local_nb_seeds" + log.info "Seeding method: $params.local_seeding" + log.info "Step size: $params.local_step_size" + log.info "Theta threshold: $params.local_theta" + log.info "Minimum fiber length: $params.local_min_len" + log.info "Maximum fiber length: $params.local_max_len" + log.info "Compression: $params.local_compress_streamlines" + } + log.info "" log.info "PROCESSES PER TASKS" log.info "Processes for denoising DWI: $params.processes_denoise_dwi" diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index f507985..4e8f35a 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -2,21 +2,45 @@ nextflow.enable.dsl=2 -process COMMIT2 { +process COMMIT { cpus params.processes_commit memory params.commit_memory_limit - label "COMMIT2" + label "COMMIT" input: - tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks) + tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) output: tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit + tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit tuple val(sid), path("${sid}__results_bzs/") + when: + params.run_commit + script: + ball_stick_arg="" + perp_diff_arg="" + if ( params.ball_stick ) { + ball_stick_arg="--ball_stick" + } + else { + perp_diff_arg="--perp_diff $params.perp_diff" + } + if ( params.use_commit2 ) { """ - scil_run_commit.py $h5 $dwi $bval $bvec ${sid}__results_bzs/ --in_peaks $peaks \ - --processes $params.processes_commit --b_thr $params.b0_thr --nbr_dir $params.nbr_dir \ - --commit2 --ball_stick --para_diff $params.para_diff 
--iso_diff $params.iso_diff -v - mv ${sid}__results_bzs/commit_2/decompose_commit.h5 ./${sid}__decompose_commit.h5 + scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 --in_peaks $peaks\ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ + --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff + mv "${sid}__results_bzs/commit_2/decompose_commit.h5" ./"${sid}__decompose_commit.h5" + mv "${sid}__results_bzs/commit_2/essential_tractogram.trk" ./"${sid}__essential_tractogram.trk" """ + } + else { + """ + scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ + --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff + mv "${sid}__results_bzs/commit_1/decompose_commit.h5" ./"${sid}__decompose_commit.h5" + mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" ./"${sid}__essential_tractogram.trk" + """ + } } \ No newline at end of file diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index f7f61ce..dc7458c 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl=2 include { TRANSFORM_LABELS } from "../processes/transform_labels.nf" include { DECOMPOSE_CONNECTIVITY } from "../processes/decompose.nf" -include { COMMIT2 } from "../processes/commit.nf" +include { COMMIT } from "../processes/commit.nf" include { COMPUTE_AFD_FIXEL; COMPUTE_CONNECTIVITY } from "../processes/compute_metrics.nf" include { VISUALIZE_CONNECTIVITY } from "../processes/viz.nf" @@ -17,7 +17,8 @@ workflow CONNECTOMICS { fodf_channel metrics_channel t2w_channel - transfos_channel + transfos_channels + main: channel_for_transfo = labels_channel @@ -26,18 +27,34 @@ workflow CONNECTOMICS { 
TRANSFORM_LABELS(channel_for_transfo) - decompose_channel = tracking_channel - .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + if ( params.infant_config ) { + commit_channel = tracking_channel + .combine(dwi_peaks_channel, by: 0) + + COMMIT(commit_channel) + + decompose_channel = COMMIT.out.trk_commit + .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + + DECOMPOSE_CONNECTIVITY(decompose_channel) + + afd_fixel_channel = DECOMPOSE_CONNECTIVITY.out.decompose + .combine(fodf_channel, by: 0) + } + else { + decompose_channel = tracking_channel + .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) - DECOMPOSE_CONNECTIVITY(decompose_channel) + DECOMPOSE_CONNECTIVITY(decompose_channel) - commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose - .combine(dwi_peaks_channel, by: 0) + commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose + .combine(dwi_peaks_channel, by: 0) - COMMIT2(commit_channel) + COMMIT(commit_channel) - afd_fixel_channel = COMMIT2.out.h5_commit - .combine(fodf_channel, by: 0) + afd_fixel_channel = COMMIT.out.h5_commit + .combine(fodf_channel, by: 0) + } COMPUTE_AFD_FIXEL(afd_fixel_channel) diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index c13af32..fbd4329 100644 --- a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -119,7 +119,7 @@ process FODF_METRICS { --fa_t $params.max_fa_in_ventricle --md_t $params.min_md_in_ventricle\ -f - a_threshold=\$( echo " 2 * `awk '{for(i=1;i<=NF;i++) if(\$i>maxval) maxval=\$i;}; END { print maxval;}' ventricles_fodf_max_value.txt`" | bc ) + a_threshold=\$( echo " $params.fodf_metrics_a_factor * `awk '{for(i=1;i<=NF;i++) if(\$i>maxval) maxval=\$i;}; END { print maxval;}' ventricles_fodf_max_value.txt`" | bc ) scil_compute_fodf_metrics.py ${sid}__fodf.nii.gz --mask $b0_mask --sh_basis $params.basis\ --peaks ${sid}__peaks.nii.gz --peak_indices ${sid}__peak_indices.nii.gz\ diff --git 
a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 59ac167..46f63a1 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -45,7 +45,7 @@ process BET_T2 { export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - bet2 $anat ${sid}__t2w_bet.nii.gz -f $params.bet_t2w_f + bet2 $anat ${sid}__t2w_bet.nii.gz -f $params.bet_anat_f """ } @@ -182,6 +182,49 @@ process CROP_DWI { """ } +process DENOISE_T1 { + label "DENOISE_T1" + cpus params.processes_denoise_t1 + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), path("${sid}__t1_denoised.nii.gz"), emit: t1_denoised + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_run_nlmeans.py $t1 ${sid}__t1_denoised.nii.gz 1\ + --processes $task.cpus -f + """ +} + +process N4_T1 { + label "N4_T1" + cpus 1 + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), path("${sid}__t1_n4.nii.gz"), emit: t1_n4 + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + N4BiasFieldCorrection -i $t1\ + -o [${sid}__t1_n4.nii.gz, bias_field_t1.nii.gz]\ + -c [300x150x75x50, 1e-6] -v 1 + """ +} + process CROP_ANAT { label "CROP_VOLUMES" cpus 1 @@ -202,25 +245,73 @@ process CROP_ANAT { """ } +process RESAMPLE_T1 { + label "RESAMPLE_T1" + cpus 1 + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), path("${sid}__t1_resampled.nii.gz"), emit: t1_resampled + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_resample_volume.py $t1 ${sid}__t1_resampled.nii.gz\ + --voxel_size $params.anat_resolution \ + --interp $params.anat_interpolation + """ +} + +process BET_T1 { + label 
"BET_T1" + cpus params.processes_bet_t1 + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), path("${sid}__t1_bet.nii.gz"), + path("${sid}__t1_bet_mask.nii.gz"), emit: t1_and_mask_bet + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsBrainExtraction.sh -d 3 -a $t1 -e $params.template_t1/t1_template.nii.gz\ + -o bet/ -m $params.template_t1/t1_brain_probability_map.nii.gz -u 0 + scil_image_math.py convert bet/BrainExtractionMask.nii.gz ${sid}__t1_bet_mask.nii.gz\ + --data_type uint8 + mrcalc $t1 ${sid}__t1_bet_mask.nii.gz -mult ${sid}__t1_bet.nii.gz -nthreads 1 + """ +} + process RESAMPLE_ANAT { label "RESAMPLE_VOLUMES" cpus 1 input: - tuple val(sid), path(t2w), path(wm_mask) + tuple val(sid), path(t2w), path(mask) output: - tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__wm_mask_resampled.nii.gz"), emit: t2w_and_mask + tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__mask_resampled.nii.gz"), emit: t2w_and_mask script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_resample_volume.py $t2w ${sid}__t2w_resampled.nii.gz\ - --voxel_size $params.t2w_resolution --interp $params.t2w_interpolation -f - scil_resample_volume.py $wm_mask ${sid}__wm_mask_resampled.nii.gz\ - --voxel_size $params.t2w_resolution --interp $params.mask_interpolation\ + --voxel_size $params.anat_resolution --interp $params.anat_interpolation -f + scil_resample_volume.py $mask ${sid}__mask_resampled.nii.gz\ + --voxel_size $params.anat_resolution --interp $params.mask_interpolation\ -f - scil_image_math.py convert ${sid}__wm_mask_resampled.nii.gz ${sid}__wm_mask_resampled.nii.gz\ + scil_image_math.py convert ${sid}__mask_resampled.nii.gz ${sid}__mask_resampled.nii.gz\ --data_type uint8 -f """ } diff --git 
a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index fd942fb..d74941d 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl=2 -process REGISTER_ANAT { - label "REGISTER_ANAT" +process REGISTER_T2 { + label "REGISTER_T2" cpus params.processes_registration input: @@ -17,6 +17,8 @@ process REGISTER_ANAT { path("output1Warp.nii.gz"), emit: transfos tuple val(sid), path("outputInverseWarped.nii.gz"), emit: inverse_transfo + when: + params.infant_config script: // ** Registration from t2w to diff space in infant returns better result when using the MD map due ** // @@ -29,7 +31,7 @@ process REGISTER_ANAT { export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 export ANTS_RANDOM_SEED=1234 - antsRegistration --verbose 1 --dimensionality 3 --float 0 \ + antsRegistration --dimensionality 3 --float 0 \ --collapse-output-transforms 1 \ --output [ output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ --interpolation Linear --use-histogram-matching 0 \ @@ -51,4 +53,55 @@ process REGISTER_ANAT { -o ${sid}__wm_mask_warped.nii.gz -n NearestNeighbor \ -t output1Warp.nii.gz output0GenericAffine.mat -u int """ +} + +process REGISTER_T1 { + label "REGISTER_ANAT" + cpus params.processes_registration + + input: + tuple val(sid), path(fa), path(t1), path(t1_mask), path(b0) + output: + tuple val(sid), path("${sid}__t1_warped.nii.gz"), emit: t1_warped + tuple val(sid), path("${sid}__output0GenericAffine.mat"), + path("${sid}__output1Warp.nii.gz"), emit: transfos + tuple val(sid), path("${sid}__output1InverseWarp.nii.gz"), emit: inverse_transfo + tuple val(sid), path("${sid}__t1_mask_warped.nii.gz"), emit: t1_mask_warped + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + 
antsRegistration --dimensionality 3 --float 0\ + --output [output,outputWarped.nii.gz,outputInverseWarped.nii.gz]\ + --interpolation Linear --use-histogram-matching 0\ + --winsorize-image-intensities [0.005,0.995]\ + --initial-moving-transform [$b0,$t1,1]\ + --transform Rigid['0.2']\ + --metric MI[$b0,$t1,1,32,Regular,0.25]\ + --convergence [500x250x125x50,1e-6,10] --shrink-factors 8x4x2x1\ + --smoothing-sigmas 3x2x1x0\ + --transform Affine['0.2']\ + --metric MI[$b0,$t1,1,32,Regular,0.25]\ + --convergence [500x250x125x50,1e-6,10] --shrink-factors 8x4x2x1\ + --smoothing-sigmas 3x2x1x0\ + --transform SyN[0.1,3,0]\ + --metric MI[$b0,$t1,1,32]\ + --metric CC[$fa,$t1,1,4]\ + --convergence [50x25x10,1e-6,10] --shrink-factors 4x2x1\ + --smoothing-sigmas 3x2x1 + mv outputWarped.nii.gz ${sid}__t1_warped.nii.gz + mv output0GenericAffine.mat ${sid}__output0GenericAffine.mat + mv output1InverseWarp.nii.gz ${sid}__output1InverseWarp.nii.gz + mv output1Warp.nii.gz ${sid}__output1Warp.nii.gz + antsApplyTransforms -d 3 -i $t1_mask -r ${sid}__t1_warped.nii.gz\ + -o ${sid}__t1_mask_warped.nii.gz -n NearestNeighbor\ + -t ${sid}__output1Warp.nii.gz ${sid}__output0GenericAffine.mat + scil_image_math.py convert ${sid}__t1_mask_warped.nii.gz ${sid}__t1_mask_warped.nii.gz\ + --data_type uint8 -f + """ } \ No newline at end of file diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 4263e8f..d572f9e 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -2,6 +2,34 @@ nextflow.enable.dsl=2 +process SEGMENT_TISSUES { + label "SEGMENTATION" + cpus 1 + + input: + tuple val(sid), path(anat) + output: + tuple val(sid), path("${sid}__map_wm.nii.gz"), path("${sid}__map_gm.nii.gz"), + path("${sid}__map_csf.nii.gz"), emit: maps + tuple val(sid), path("${sid}__mask_wm.nii.gz"), path("${sid}__mask_gm.nii.gz"), + path("${sid}__mask_csf.nii.gz"), emit: masks + + script: + 
""" + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + fast -t 1 -n $params.number_of_tissues\ + -H 0.1 -I 4 -l 20.0 -g -o anat.nii.gz $anat + scil_image_math.py convert anat_seg_2.nii.gz ${sid}__mask_wm.nii.gz --data_type uint8 + scil_image_math.py convert anat_seg_1.nii.gz ${sid}__mask_gm.nii.gz --data_type uint8 + scil_image_math.py convert anat_seg_0.nii.gz ${sid}__mask_csf.nii.gz --data_type uint8 + mv anat_pve_2.nii.gz ${sid}__map_wm.nii.gz + mv anat_pve_1.nii.gz ${sid}__map_gm.nii.gz + mv anat_pve_0.nii.gz ${sid}__map_csf.nii.gz + """ +} + process GENERATE_MASKS { label "GENERATE_MASKS" cpus 1 @@ -9,40 +37,187 @@ process GENERATE_MASKS { input: tuple val(sid), path(t2w), path(wm_mask), path(fa) output: - tuple val(sid), path("${sid}__wm_mask_final.nii.gz"), - path("${sid}__brain_mask.nii.gz"), emit: masks + tuple val(sid), path("${sid}__seeding_mask.nii.gz"), + path("${sid}__tracking_mask.nii.gz"), emit: masks + tuple val(sid), path("${sid}__fa_mask.nii.gz") script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - mrthreshold $fa ${sid}__fa_mask.nii.gz -abs $params.fa_seeding_mask_thr -nthreads 1 -force - scil_image_math.py lower_threshold $t2w 0 brain_mask.nii.gz --data_type uint8 - scil_image_math.py erosion brain_mask.nii.gz $params.erosion ${sid}__brain_mask.nii.gz --data_type uint8 -f + bet2 $fa fa_bet -m -f 0.16 + scil_image_math.py erosion fa_bet_mask.nii.gz $params.erosion fa_bet_mask.nii.gz -f + mrcalc fa_bet.nii.gz fa_bet_mask.nii.gz -mult fa_eroded.nii.gz + mrthreshold fa_eroded.nii.gz ${sid}__fa_mask.nii.gz -abs $params.fa_seeding_mask_thr -nthreads 1 -force scil_image_math.py union ${sid}__fa_mask.nii.gz $wm_mask\ - wm_mask_temp.nii.gz --data_type uint8 -f - scil_image_math.py intersection wm_mask_temp.nii.gz ${sid}__brain_mask.nii.gz\ - ${sid}__wm_mask_final.nii.gz --data_type uint8 -f + ${sid}__seeding_mask.nii.gz --data_type 
uint8 -f + cp ${sid}__seeding_mask.nii.gz ${sid}__tracking_mask.nii.gz """ } +process LOCAL_TRACKING_MASK { + label "LOCAL_TRACKING" + cpus 1 + + input: + tuple val(sid), path(wm), path(fa) + output: + tuple val(sid), path("${sid}__local_tracking_mask.nii.gz"), emit: tracking_mask + when: + params.run_local_tracking + + script: + if (params.local_tracking_mask_type == "wm") + """ + mv $wm ${sid}__local_tracking_mask.nii.gz + """ + else if (params.local_tracking_mask_type == "fa") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + mrcalc $fa $params.local_fa_tracking_mask_threshold -ge ${sid}__local_tracking_mask.nii.gz\ + -datatype uint8 + """ +} + +process LOCAL_SEEDING_MASK { + label "LOCAL_TRACKING" + cpus 1 + + input: + tuple val(sid), path(wm), path(fa) + output: + tuple val(sid), path("${sid}__local_seeding_mask.nii.gz"), emit: seeding_mask + when: + params.run_local_tracking + + script: + if (params.local_seeding_mask_type == "wm") + """ + mv $wm ${sid}__local_seeding_mask.nii.gz + """ + else if (params.local_seeding_mask_type == "fa") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + mrcalc $fa $params.local_fa_seeding_mask_threshold -ge ${sid}__local_seeding_mask.nii.gz\ + -datatype uint8 + """ +} + process LOCAL_TRACKING { - label "TRACKING" + label "LOCAL_TRACKING" cpus 2 input: - tuple val(sid), path(fodf), path(wm_mask), path(brain_mask) + tuple val(sid), path(fodf), path(seeding_mask), path(tracking_mask) output: tuple val(sid), path("${sid}__local_tracking.trk"), emit: tractogram + when: + params.run_local_tracking + script: + compress = params.local_compress_streamlines ? 
'--compress ' + params.local_compress_value : '' """ - scil_compute_local_tracking.py $fodf $wm_mask $wm_mask\ - tmp.trk --algo $params.algo --$params.seeding $params.nb_seeds\ - --seed $params.tracking_seed --step $params.step_size --theta $params.theta\ - --sfthres $params.sfthres --min_length $params.min_len\ - --max_length $params.max_len --sh_basis $params.sh_fitting_basis\ - --compress $params.compress_value + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_local_tracking.py $fodf $seeding_mask $tracking_mask\ + tmp.trk --algo $params.local_algo --$params.local_seeding $params.local_nb_seeds\ + --seed $params.local_tracking_seed --step $params.local_step_size --theta $params.local_theta\ + --sfthres $params.local_sfthres --min_length $params.local_min_len\ + --max_length $params.local_max_len $compress --sh_basis $params.local_sh_fitting_basis\ scil_remove_invalid_streamlines.py tmp.trk\ ${sid}__local_tracking.trk --remove_single_point """ +} + +process PFT_SEEDING_MASK { + label "PFT_TRACKING" + cpus 1 + + input: + tuple val(sid), path(wm), path(fa), path(interface_mask) + output: + tuple val(sid), path("${sid}__pft_seeding_mask.nii.gz"), emit: seeding_mask + when: + params.run_pft_tracking + + script: + if (params.pft_seeding_mask_type == "wm") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_image_math.py union $wm, $interface_mask ${sid}__pft_seeding_mask.nii.gz\ + --data_type uint8 + """ + else if (params.pft_seeding_mask_type == "interface") + """ + mv $interface_mask ${sid}__pft_seeding_mask.nii.gz + """ + else if (params.pft_seeding_mask_type == "fa") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + mrcalc $fa $params.pft_fa_seeding_mask_threshold -ge ${sid}__pft_seeding_mask.nii.gz\ + -datatype uint8 + """ +} + +process PFT_TRACKING_MASK { + 
label "PFT_TRACKING" + cpus 1 + + input: + tuple val(sid), path(wm), path(gm), path(csf) + output: + tuple val(sid), path("${sid}__map_include.nii.gz"), path("${sid}__map_exclude.nii.gz"), emit: tracking_maps + tuple val(sid), path("${sid}__interface.nii.gz"), emit: interface_map + when: + params.run_pft_tracking + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_maps_for_particle_filter_tracking.py $wm $gm $csf\ + --include ${sid}__map_include.nii.gz\ + --exclude ${sid}__map_exclude.nii.gz\ + --interface ${sid}__interface.nii.gz -f + """ +} + +process PFT_TRACKING { + label "PFT_TRACKING" + cpus 2 + + input: + tuple val(sid), path(fodf), path(include), path(exclude), path(seed) + output: + tuple val(sid), path("${sid}__pft_tracking.trk"), emit: tractogram + when: + params.run_pft_tracking + + script: + compress = params.pft_compress_streamlines ? '--compress ' + params.pft_compress_value : '' + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_pft.py $fodf $seed $include $exclude\ + tmp.trk\ + --algo $params.pft_algo --$params.pft_seeding $params.pft_nbr_seeds\ + --seed $curr_seed --step $params.step_size --theta $params.theta\ + --sfthres $params.pft_sfthres --sfthres_init $params.pft_sfthres_init\ + --min_length $params.pft_min_len --max_length $params.pft_max_len\ + --particles $params.pft_particles --back $params.pft_back\ + --forward $params.pft_front $compress --sh_basis $params.basis + scil_remove_invalid_streamlines.py tmp.trk\ + ${sid}__pft_tracking.trk --remove_single_point + """ } \ No newline at end of file diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index ea5a2cd..5f7e48e 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -57,7 +57,7 @@ workflow PREPROCESSING { 
anat_crop_channel = anat_channel - if (params.run_bet_t2w) { + if (params.run_bet_anat) { BET_T2(anat_channel) anat_crop_channel = BET_T2.out.bet_t2 } diff --git a/modules/tracking/workflows/registration.nf b/modules/tracking/workflows/registration.nf index b2f1d74..e7b21d5 100644 --- a/modules/tracking/workflows/registration.nf +++ b/modules/tracking/workflows/registration.nf @@ -3,21 +3,39 @@ nextflow.enable.dsl=2 include { - REGISTER_ANAT + REGISTER_T1; + REGISTER_T2 } from '../processes/registration_processes.nf' workflow REGISTRATION { take: - md_channel - t2w_and_mask + fa_md_channel + anat_and_mask + b0_channel main: - register_channel = md_channel - .combine(t2w_and_mask, by: 0) + t2_reg_channel = fa_md_channel + .map{ [it[0], it[2]] } + .combine(anat_and_mask, by: 0) - REGISTER_ANAT(register_channel) + REGISTER_T2(t2_reg_channel) + + t1_reg_channel = fa_md_channel + .map{ [it[0], it[1]] } + .combine(anat_and_mask, by: 0) + .combine(b0_channel, by: 0) + + REGISTER_T1(t1_reg_channel) + + if ( params.infant_config ) { + warped_anat = REGISTER_T2.out.warped_anat + transfos = REGISTER_T2.out.transfos + } else { + warped_anat = REGISTER_T1.out.t1_warped + transfos = REGISTER_T1.out.transfos + } emit: - warped_anat = REGISTER_ANAT.out.warped_anat - transfos = REGISTER_ANAT.out.transfos + warped_anat = warped_anat + transfos = transfos } \ No newline at end of file diff --git a/modules/tracking/workflows/tracking.nf b/modules/tracking/workflows/tracking.nf index 4c5373c..e36c7ff 100644 --- a/modules/tracking/workflows/tracking.nf +++ b/modules/tracking/workflows/tracking.nf @@ -3,25 +3,77 @@ nextflow.enable.dsl=2 include { + SEGMENT_TISSUES; GENERATE_MASKS; - LOCAL_TRACKING + LOCAL_TRACKING_MASK; + LOCAL_SEEDING_MASK; + LOCAL_TRACKING; + PFT_SEEDING_MASK; + PFT_TRACKING_MASK; + PFT_TRACKING } from '../processes/tracking_processes.nf' workflow TRACKING { take: - t2_and_mask_channel + anat_and_mask_channel fodf_channel fa_channel main: - masks_channel = 
t2_and_mask_channel - .combine(fa_channel, by: 0) - GENERATE_MASKS(masks_channel) + if ( params.infant_config ) { - tracking_channel = fodf_channel - .combine(GENERATE_MASKS.out.masks, by: 0) + masks_channel = anat_and_mask_channel + .combine(fa_channel, by: 0) + + GENERATE_MASKS(masks_channel) + + tracking_channel = fodf_channel + .combine(GENERATE_MASKS.out.masks, by: 0) - LOCAL_TRACKING(tracking_channel) + LOCAL_TRACKING(tracking_channel) + out_channel = LOCAL_TRACKING.out.tractogram + + } else { + + anat_channel = anat_and_mask_channel.map{ [it[0], it[1]] } + + SEGMENT_TISSUES(anat_channel) + + local_masks_channel = SEGMENT_TISSUES.out.masks + .map{ [it[0], it[1]] } + .combine(fa_channel, by: 0) + + LOCAL_TRACKING_MASK(local_masks_channel) + LOCAL_SEEDING_MASK(local_masks_channel) + + local_tracking_channel = fodf_channel + .combine(LOCAL_SEEDING_MASK.out.seeding_mask, by: 0) + .combine(LOCAL_TRACKING_MASK.out.tracking_mask, by: 0) + + LOCAL_TRACKING(local_tracking_channel) + + PFT_TRACKING_MASK(SEGMENT_TISSUES.out.maps) + + pft_masks_channel = SEGMENT_TISSUES.out.masks + .map{ [it[0], it[1]] } + .combine(fa_channel, by: 0) + .combine(PFT_TRACKING_MASK.out.interface_map, by: 0) + + PFT_SEEDING_MASK(pft_masks_channel) + + pft_tracking_channel = fodf_channel + .combine(PFT_TRACKING_MASK.out.tracking_maps, by: 0) + .combine(PFT_SEEDING_MASK.out.seeding_mask, by: 0) + + PFT_TRACKING(pft_tracking_channel) + } + + if ( params.run_local_tracking ) { + out_channel = LOCAL_TRACKING.out.tractogram + } else { + out_channel = PFT_TRACKING.out.tractogram + } + emit: - trk = LOCAL_TRACKING.out.tractogram + trk = out_channel } \ No newline at end of file diff --git a/nextflow.config b/nextflow.config index 4559d55..4c85103 100644 --- a/nextflow.config +++ b/nextflow.config @@ -17,17 +17,18 @@ params { // ** TRACKING PARAMS ** // - // Global Options + //** Global Options **// b0_thr = 10 dwi_shell_tolerance = 20 + template_t1 = "/human-data/mni_152_sym_09c/t1" - // BET DWI 
Options - initial_bet_f = 0.5 - final_bet_f = 0.35 + //** BET DWI Options **// + initial_bet_f = 0.16 + final_bet_f = 0.16 - // BET T2 Options - run_bet_t2w = false - bet_t2w_f = 0.16 + //** BET ANAT Options **// + run_bet_anat = false + bet_anat_f = 0.16 // EDDY and TOPUP Options topup_config = "b02b0.cnf" @@ -39,17 +40,16 @@ params { use_slice_drop_correction = true // NORMALIZE Options - fa_mask_threshold = 0.10 + fa_mask_threshold = 0.4 // RESAMPLE_ANAT Options - t2w_resolution = 1 - t2w_interpolation = "lin" + anat_resolution = 1 + anat_interpolation = "lin" mask_interpolation = "nn" // RESAMPLE_DWI Options dwi_resolution = 1 dwi_interpolation = "lin" - mask_dwi_interpolation = "nn" // EXTRACT_DTI_SHELLS Options max_dti_shell_value = 1200 @@ -61,8 +61,9 @@ params { // FODF Options min_fodf_shell_value = 700 + fodf_metrics_a_factor = 2.0 max_fa_in_ventricle = 0.1 - min_md_in_ventricle = 0.00185 + min_md_in_ventricle = 0.003 relative_threshold = 0.1 basis = "descoteaux07" sh_order = 8 @@ -74,24 +75,56 @@ params { min_nvox = 300 roi_radius = 20 set_frf = true - manual_frf = "12,7,7" - - // Seeding and tracking Options - fa_seeding_mask_thr = 0.1 - algo = "prob" - nb_seeds = 10 - seeding = "npv" - step_size = 0.5 - theta = 20 - sfthres = 0.1 - min_len = 10 - max_len = 200 - tracking_seed = 0 - erosion = 4 - compress_value = 0.2 + manual_frf = "15,4,4" + + //** PFT Seeding and Tracking Options **// + run_pft_tracking = true + pft_compress_streamlines = true + + pft_seeding_mask_type = "wm" + pft_fa_seeding_mask_thr = 0.1 + + pft_algo = "prob" + pft_nb_seeds = 10 + pft_seeding = "npv" + pft_step_size = 0.5 + pft_theta = 20 + pft_sfthres = 0.1 + pft_sfthres_init = 0.5 + pft_min_len = 20 + pft_max_len = 200 + pft_particles = 15 + pft_back = 2 + pft_front = 1 + pft_compress_value = 0.2 + pft_random_seed = 0 + + //** Local Seeding and Tracking Options **// + run_local_tracking = false + local_compress_streamlines = true + + local_fa_tracking_mask_thr = 0.1 + 
local_tracking_mask_type = "wm" + local_fa_seeding_mask_thr = 0.1 + local_seeding_mask_type = "wm" + + local_algo = "prob" + local_nb_seeds = 10 + local_seeding = "npv" + local_step_size = 0.5 + local_theta = 20 + local_sfthres = 0.1 + local_sfthres_init = 0.5 + local_min_len = 20 + local_max_len = 200 + local_tracking_seed = 0 + local_compress_value = 0.2 + local_erosion = 0 // Processes per tasks processes_denoise_dwi = 4 + processes_denoise_t1 = 4 + processes_bet_t1 = 4 processes_eddy = 1 processes_registration = 4 processes_fodf = 4 @@ -107,9 +140,14 @@ params { loop_max_angle = 330 outlier_threshold = 0.4 - // COMMIT Options + //** COMMIT Options **// + run_commit = true + use_commit2 = true + b_thr = 50 nbr_dir = 500 + ball_stick = true para_diff = "1.7E-3" + perp_diff = "0.51E-3" iso_diff = "2.0E-3" // Processes per tasks @@ -124,6 +162,7 @@ params { // Profiles Options run_tracking = false run_connectomics = false + infant_config = false Mean_FRF_Publish_Dir = "./Results_Infant_Tracking/Mean_FRF" } @@ -167,4 +206,36 @@ profiles { params.run_connectomics = true } + + infant { + + params.infant_config = true + + //** BET DWI Options **// + params.initial_bet_f = 0.5 + params.final_bet_f = 0.35 + + //** NORMALIZE Options **// + params.fa_mask_threshold = 0.10 + + //** FODF Options **// + params.max_fa_in_ventricle = 0.1 + params.min_md_in_ventricle = 0.00185 + + //** FRF Options **// + params.manual_frf = "12,7,7" + + //** LOCAL TRACKING **// + params.run_pft_tracking = false + params.run_local_tracking = true + params.erosion = 6 + params.local_min_len = 15 + + //** COMMIT Options **// + run_commit = true + use_commit2 = false + para_diff = "1.2E-3" + iso_diff = "2.0E-3" + + } } \ No newline at end of file From d326b4ebaf3048aeaeb54ab0a3206f82c0017bb7 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 18 Sep 2023 09:07:00 -0400 Subject: [PATCH 07/54] fix resample and flatten metrics channel --- main.nf | 
6 +++++- modules/connectomics/workflows/connectomics.nf | 2 +- modules/io.nf | 7 ------- nextflow.config | 1 + 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/main.nf b/main.nf index d204a9a..ea32258 100644 --- a/main.nf +++ b/main.nf @@ -97,11 +97,14 @@ workflow { if ( params.run_connectomics && !params.run_tracking ) { data = get_data_connectomics() + metrics = data.metrics.transpose().groupTuple() + .flatMap{ sid, metrics -> data.metrics.collect{ [sid, it] } } + CONNECTOMICS(data.trk, data.labels, data.dwi_peaks, data.fodf, - data.metrics, + metrics, data.t2w, data.transfos) } @@ -140,6 +143,7 @@ def display_usage () { "template_t1":"$params.template_t1", "dwi_resolution":"$params.dwi_resolution", "dwi_interpolation":"$params.dwi_interpolation", + "mask_dwi_interpolation":"$params.mask_dwi_interpolation", "max_dti_shell_value":"$params.max_dti_shell_value", "sh_fitting":"$params.sh_fitting", "sh_fitting_order":"$params.sh_fitting_order", diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index dc7458c..4bc152b 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -17,7 +17,7 @@ workflow CONNECTOMICS { fodf_channel metrics_channel t2w_channel - transfos_channels + transfos_channel main: diff --git a/modules/io.nf b/modules/io.nf index d6b4e63..00d54e4 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -115,13 +115,6 @@ workflow get_data_connectomics { // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. dwi_peaks_channel = dwi_peaks_channel.map{sid, bvals, bvecs, dwi, peaks -> tuple(sid, dwi, bvals, bvecs, peaks)} - // Setting up transfos channel in this order : sid, affine, syn, masksyn - // transfos_channel = transfos_channel.map{sid, affine, masksyn, syn -> tuple(sid, affine, syn, masksyn)} - - // Flattening metrics channel. 
- metrics_channel = metrics_channel.transpose().groupTuple() - .flatMap{ sid, metrics -> metrics.collect{ [sid, it] } } - emit: trk = tracking_channel labels = labels_channel diff --git a/nextflow.config b/nextflow.config index 4c85103..8c0449a 100644 --- a/nextflow.config +++ b/nextflow.config @@ -50,6 +50,7 @@ params { // RESAMPLE_DWI Options dwi_resolution = 1 dwi_interpolation = "lin" + mask_dwi_interpolation = "nn" // EXTRACT_DTI_SHELLS Options max_dti_shell_value = 1200 From 9ffb034d55286807f3bac8062fb8a4ca792b60cd Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 18 Sep 2023 10:00:59 -0400 Subject: [PATCH 08/54] fix masking --- modules/tracking/processes/tracking_processes.nf | 2 +- modules/tracking/workflows/tracking.nf | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index d572f9e..a5a744f 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -35,7 +35,7 @@ process GENERATE_MASKS { cpus 1 input: - tuple val(sid), path(t2w), path(wm_mask), path(fa) + tuple val(sid), path(wm_mask), path(fa) output: tuple val(sid), path("${sid}__seeding_mask.nii.gz"), path("${sid}__tracking_mask.nii.gz"), emit: masks diff --git a/modules/tracking/workflows/tracking.nf b/modules/tracking/workflows/tracking.nf index e36c7ff..d910494 100644 --- a/modules/tracking/workflows/tracking.nf +++ b/modules/tracking/workflows/tracking.nf @@ -23,6 +23,7 @@ workflow TRACKING { if ( params.infant_config ) { masks_channel = anat_and_mask_channel + .map{ [it[0], it[2]] } .combine(fa_channel, by: 0) GENERATE_MASKS(masks_channel) From 33018a04b14af5768afe7e9f2c7a13787260d825 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 18 Sep 2023 10:03:20 -0400 Subject: [PATCH 09/54] fix local_fa_seeding_mask_thr 
--- modules/tracking/processes/tracking_processes.nf | 2 +- nextflow.config | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index a5a744f..c51b6ce 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -48,7 +48,7 @@ process GENERATE_MASKS { bet2 $fa fa_bet -m -f 0.16 scil_image_math.py erosion fa_bet_mask.nii.gz $params.erosion fa_bet_mask.nii.gz -f mrcalc fa_bet.nii.gz fa_bet_mask.nii.gz -mult fa_eroded.nii.gz - mrthreshold fa_eroded.nii.gz ${sid}__fa_mask.nii.gz -abs $params.fa_seeding_mask_thr -nthreads 1 -force + mrthreshold fa_eroded.nii.gz ${sid}__fa_mask.nii.gz -abs $params.local_fa_seeding_mask_thr -nthreads 1 -force scil_image_math.py union ${sid}__fa_mask.nii.gz $wm_mask\ ${sid}__seeding_mask.nii.gz --data_type uint8 -f cp ${sid}__seeding_mask.nii.gz ${sid}__tracking_mask.nii.gz diff --git a/nextflow.config b/nextflow.config index 8c0449a..2f2dff8 100644 --- a/nextflow.config +++ b/nextflow.config @@ -231,6 +231,7 @@ profiles { params.run_local_tracking = true params.erosion = 6 params.local_min_len = 15 + params.local_fa_seeding_mask_thr = 0.1 //** COMMIT Options **// run_commit = true From 4728fbcd59bb921e35feb713275851b3b982e083 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 18 Sep 2023 10:06:23 -0400 Subject: [PATCH 10/54] change basis params --- modules/tracking/processes/tracking_processes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index c51b6ce..c99c85a 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -128,7 +128,7 @@ process LOCAL_TRACKING { tmp.trk --algo $params.local_algo --$params.local_seeding 
$params.local_nb_seeds\ --seed $params.local_tracking_seed --step $params.local_step_size --theta $params.local_theta\ --sfthres $params.local_sfthres --min_length $params.local_min_len\ - --max_length $params.local_max_len $compress --sh_basis $params.local_sh_fitting_basis\ + --max_length $params.local_max_len $compress --sh_basis $params.basis\ scil_remove_invalid_streamlines.py tmp.trk\ ${sid}__local_tracking.trk --remove_single_point """ From de09cde0b983070f996036dcb414bd6bea908b77 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 18 Sep 2023 15:53:06 -0400 Subject: [PATCH 11/54] commit on trk option --- main.nf | 1 + modules/connectomics/processes/commit.nf | 16 ++++++++++++---- modules/tracking/processes/tracking_processes.nf | 2 +- nextflow.config | 10 ++++++---- 4 files changed, 20 insertions(+), 9 deletions(-) diff --git a/main.nf b/main.nf index ea32258..b152d1c 100644 --- a/main.nf +++ b/main.nf @@ -214,6 +214,7 @@ def display_usage () { "outlier_threshold":"$params.outlier_threshold", "run_commit":"$params.run_commit", "use_commit2":"$params.use_commit2", + "commit_on_trk":"$params.commit_on_trk", "b_thr":"$params.b_thr", "ball_stick":"$params.ball_stick", "nbr_dir":"$params.nbr_dir", diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 4e8f35a..0b664de 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -30,8 +30,16 @@ process COMMIT { scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 --in_peaks $peaks\ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff - mv "${sid}__results_bzs/commit_2/decompose_commit.h5" ./"${sid}__decompose_commit.h5" - mv "${sid}__results_bzs/commit_2/essential_tractogram.trk" ./"${sid}__essential_tractogram.trk" + mv 
"${sid}__results_bzs/commit_2/decompose_commit.h5" "./${sid}__decompose_commit.h5" + mv "${sid}__results_bzs/commit_2/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" + """ + } + else if ( params.commit_on_trk ) { + """ + scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ + --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff + mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" """ } else { @@ -39,8 +47,8 @@ process COMMIT { scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff - mv "${sid}__results_bzs/commit_1/decompose_commit.h5" ./"${sid}__decompose_commit.h5" - mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" ./"${sid}__essential_tractogram.trk" + mv "${sid}__results_bzs/commit_1/decompose_commit.h5" "./${sid}__decompose_commit.h5" + mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" """ } } \ No newline at end of file diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index c99c85a..baa4880 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -128,7 +128,7 @@ process LOCAL_TRACKING { tmp.trk --algo $params.local_algo --$params.local_seeding $params.local_nb_seeds\ --seed $params.local_tracking_seed --step $params.local_step_size --theta $params.local_theta\ --sfthres $params.local_sfthres --min_length $params.local_min_len\ - --max_length $params.local_max_len $compress --sh_basis $params.basis\ + --max_length $params.local_max_len $compress --sh_basis 
$params.basis scil_remove_invalid_streamlines.py tmp.trk\ ${sid}__local_tracking.trk --remove_single_point """ diff --git a/nextflow.config b/nextflow.config index 2f2dff8..e9e9587 100644 --- a/nextflow.config +++ b/nextflow.config @@ -144,6 +144,7 @@ params { //** COMMIT Options **// run_commit = true use_commit2 = true + commit_on_trk = false b_thr = 50 nbr_dir = 500 ball_stick = true @@ -234,10 +235,11 @@ profiles { params.local_fa_seeding_mask_thr = 0.1 //** COMMIT Options **// - run_commit = true - use_commit2 = false - para_diff = "1.2E-3" - iso_diff = "2.0E-3" + params.run_commit = true + params.use_commit2 = false + params.commit_on_trk = true + params.para_diff = "1.2E-3" + params.iso_diff = "2.0E-3" } } \ No newline at end of file From e6153f37f2f6508d9317264217e6f25b1ed53e30 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 18 Sep 2023 16:53:26 -0400 Subject: [PATCH 12/54] adding commit_on_trk process --- modules/connectomics/processes/commit.nf | 31 +++++++++++++++---- .../connectomics/workflows/connectomics.nf | 7 +++-- 2 files changed, 29 insertions(+), 9 deletions(-) diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 0b664de..e8d0688 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -11,7 +11,6 @@ process COMMIT { tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) output: tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit - tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit tuple val(sid), path("${sid}__results_bzs/") when: params.run_commit @@ -31,24 +30,44 @@ process COMMIT { --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff mv "${sid}__results_bzs/commit_2/decompose_commit.h5" "./${sid}__decompose_commit.h5" - mv 
"${sid}__results_bzs/commit_2/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" """ } - else if ( params.commit_on_trk ) { + else { """ scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff - mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" + mv "${sid}__results_bzs/commit_1/decompose_commit.h5" "./${sid}__decompose_commit.h5" """ } +} + +process COMMIT_ON_TRK { + label "COMMIT" + cpus params.processes_commit + memory params.commit_memory_limit + + input: + tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) + output: + tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit + tuple val(sid), path("${sid}__results_bzs/") + when: + params.run_commit + + script: + ball_stick_arg="" + perp_diff_arg="" + if ( params.ball_stick ) { + ball_stick_arg="--ball_stick" + } else { + perp_diff_arg="--perp_diff $params.perp_diff" + } """ scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff - mv "${sid}__results_bzs/commit_1/decompose_commit.h5" "./${sid}__decompose_commit.h5" mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" """ - } } \ No newline at end of file diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index 4bc152b..528c398 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -4,7 +4,8 @@ nextflow.enable.dsl=2 include { TRANSFORM_LABELS } from "../processes/transform_labels.nf" include { 
DECOMPOSE_CONNECTIVITY } from "../processes/decompose.nf" -include { COMMIT } from "../processes/commit.nf" +include { COMMIT; + COMMIT_ON_TRK } from "../processes/commit.nf" include { COMPUTE_AFD_FIXEL; COMPUTE_CONNECTIVITY } from "../processes/compute_metrics.nf" include { VISUALIZE_CONNECTIVITY } from "../processes/viz.nf" @@ -31,9 +32,9 @@ workflow CONNECTOMICS { commit_channel = tracking_channel .combine(dwi_peaks_channel, by: 0) - COMMIT(commit_channel) + COMMIT_ON_TRK(commit_channel) - decompose_channel = COMMIT.out.trk_commit + decompose_channel = COMMIT_ON_TRK.out.trk_commit .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) DECOMPOSE_CONNECTIVITY(decompose_channel) From d89b915e15c6c2b806a1d93b8105776b7cf3da16 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 20 Sep 2023 15:24:40 -0400 Subject: [PATCH 13/54] fix metrics channel --- main.nf | 23 ++++++++++++++--------- modules/io.nf | 2 +- nextflow.config | 2 +- 3 files changed, 16 insertions(+), 11 deletions(-) diff --git a/main.nf b/main.nf index b152d1c..04404f3 100644 --- a/main.nf +++ b/main.nf @@ -67,18 +67,24 @@ workflow { .combine(FODF.out.peaks, by: 0) fodf = FODF.out.fodf - // ** Default metrics will be used with combined metrics provided in the input folder ** // - provided_metrics = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, flat: true) - { fetch_id(it.parent, input) } def_metrics = DTI.out.fa_and_md .combine(DTI.out.ad_and_rd, by: 0) .combine(FODF.out.afd_and_nufo, by: 0) - metrics = def_metrics - .combine(provided_metrics, by: 0) + .map{ sid, fa, md, ad, rd, afd, nufo -> tuple(sid, [fa, md, ad, rd, afd, nufo])} + .transpose() + + if ( file("$input/**/metrics/*.nii.gz") ) { + // ** Default metrics will be used with combined metrics provided in the input folder ** // + provided_metrics = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, flat: false) + { fetch_id(it.parent.parent, input) } + .transpose() + + 
def_metrics = def_metrics + .concat(provided_metrics) + } // ** Flattening metrics channel ** // - metrics = metrics.transpose().groupTuple() - .flatMap{ sid, metrics -> metrics.collect{ [sid, it] } } + metrics_flat = def_metrics.groupTuple() t2w = REGISTRATION.out.warped_anat .map{ [it[0], it[1]] } @@ -89,7 +95,7 @@ workflow { labels, dwi_peaks, fodf, - metrics, + metrics_flat, t2w, transfos) } @@ -98,7 +104,6 @@ workflow { data = get_data_connectomics() metrics = data.metrics.transpose().groupTuple() - .flatMap{ sid, metrics -> data.metrics.collect{ [sid, it] } } CONNECTOMICS(data.trk, data.labels, diff --git a/modules/io.nf b/modules/io.nf index 00d54e4..85ad887 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -107,7 +107,7 @@ workflow get_data_connectomics { { fetch_id(it.parent, input) } metrics_channel = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, maxDepth: 2) { it.parent.parent.name } - t2w_channel = Channel.fromFilePairs("$input/**/*t2w_warped.nii.gz", size: 1, flat: true) + t2w_channel = Channel.fromFilePairs("$input/**/*t2w.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,output1Warp.nii.gz}", size: 2, flat: true) { fetch_id(it.parent, input) } diff --git a/nextflow.config b/nextflow.config index e9e9587..a51c176 100644 --- a/nextflow.config +++ b/nextflow.config @@ -225,7 +225,7 @@ profiles { params.min_md_in_ventricle = 0.00185 //** FRF Options **// - params.manual_frf = "12,7,7" + params.manual_frf = "12,5,5" //** LOCAL TRACKING **// params.run_pft_tracking = false From a326db43ee98e5cdefdc22b198b1328b37b7347e Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 24 Oct 2023 21:50:25 -0400 Subject: [PATCH 14/54] improved usage and docstring + CI --- .github/workflows/ci.yml | 24 ++ .vscode/extensions.json | 5 + USAGE | 222 +---------------- main.nf | 118 ++++++--- modules/connectomics/USAGE | 111 
+++++++++ modules/connectomics/USAGE_INFANT | 111 +++++++++ modules/connectomics/processes/commit.nf | 6 +- .../connectomics/workflows/connectomics.nf | 21 +- modules/io.nf | 56 ++++- modules/template/processes/average.nf | 38 +++ modules/template/processes/registration.nf | 102 ++++++++ modules/template/workflows/pop_template.nf | 59 +++++ modules/tracking/USAGE | 221 +++++++++++++++++ modules/tracking/USAGE_INFANT | 223 ++++++++++++++++++ modules/tracking/processes/preprocess.nf | 28 ++- modules/tracking/workflows/preprocessing.nf | 81 ++++--- modules/tracking/workflows/registration.nf | 5 +- modules/tracking/workflows/tracking.nf | 25 +- nextflow.config | 14 ++ 19 files changed, 1152 insertions(+), 318 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 .vscode/extensions.json create mode 100644 modules/connectomics/USAGE create mode 100644 modules/connectomics/USAGE_INFANT create mode 100644 modules/template/processes/average.nf create mode 100644 modules/template/processes/registration.nf create mode 100644 modules/template/workflows/pop_template.nf create mode 100644 modules/tracking/USAGE create mode 100644 modules/tracking/USAGE_INFANT diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..da3c779 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,24 @@ +name: CI +on: [push, pull_request] + +jobs: + test: + env: + NXF_VER: ${{ matrix.nxf_ver }} + NXF_ANSI_LOG: false + runs-on: ubuntu-latest + strategy: + matrix: + nxf_ver: ['22.10.6', '23.10.0'] + steps: + - uses: actions/checkout@v2 + - name: Install Nextflow + run: | + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - name: Pull docker image + run: | + docker pull scilus/docker-tractoflow:latest + - name: Run pipeline + run: | + nextflow run ${GITHUB_WORKSPACE} --help -with-docker scilus/docker-tractoflow:latest \ No newline at end of file diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file 
mode 100644 index 0000000..5de48d8 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,5 @@ +{ + "recommendations": [ + "nf-core.nf-core-extensionpack" + ] +} \ No newline at end of file diff --git a/USAGE b/USAGE index 947d3c9..ab34601 100644 --- a/USAGE +++ b/USAGE @@ -1,4 +1,5 @@ -Infant-DWI Pipeline + +DWI Pipeline ======================== Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. @@ -37,221 +38,4 @@ nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connec Run Both Pipeline -nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics - -DESCRIPTION - - --input=/path/to/[input_folder] Input folder containing multiple subjects - - [Input] - ├-- S1 - | ├-- *dwi.nii.gz [Required for all profiles] - | ├-- *.bval [Required for all profiles] - | ├-- *.bvec [Required for all profiles] - | ├-- *revb0.nii.gz [Required only for tracking] - | ├-- *t2w.nii.gz [Required only for tracking] - | |-- *t2w_warped.nii.gz [Required only for connectomics] - | ├-- *brain_mask.nii.gz [Required only for tracking] - | ├-- *wm_mask.nii.gz [Required only for tracking] - | ├-- *.trk [Required only for connectomics] - | ├-- *labels.nii.gz [Required only for connectomics] - | ├-- *peaks.nii.gz [Required only for connectomics] - | ├-- *fodf.nii.gz [Required only for connectomics] - | ├-- OGenericAffine.mat [Required only for connectomics] - | ├-- synoutput0Warp.nii.gz [Required only for connectomics] - | ├-- maskoutput0Warp.nii.gz [Required only for connectomics] - | └-- metrics - | └-- METRIC_NAME.nii.gz [Optional] - └-- S2 - ├-- *dwi.nii.gz [Required for all profiles] - ├-- *bval [Required for all profiles] - ├-- *bvec [Required for all profiles] - ├-- *revb0.nii.gz [Required only for tracking] - ├-- *t2w.nii.gz [Required only for tracking] - |-- *t2w_warped.nii.gz [Required only for connectomics] - ├-- *brain_mask.nii.gz [Required only for tracking] - ├-- *wm_mask.nii.gz 
[Required only for tracking] - ├-- *.trk [Required only for connectomics] - ├-- *labels.nii.gz [Required only for connectomics] - ├-- *peaks.nii.gz [Required only for connectomics] - ├-- *fodf.nii.gz [Required only for connectomics] - ├-- OGenericAffine.mat [Required only for connectomics] - ├-- synoutput0Warp.nii.gz [Required only for connectomics] - ├-- maskoutput0Warp.nii.gz [Required only for connectomics] - └-- metrics - └-- METRIC_NAME.nii.gz [Optional] - -OPTIONAL ARGUMENTS (current value) - -[TRACKING OPTIONS] - - --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) - --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same b-value. - ($dwi_shell_tolerance) - - BET DWI OPTIONS - --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) - --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) - - BET T2 OPTIONS - --run_bet_anat If set, will perform brain extraction on the input T2w volume. ($run_bet_anat) - Default settings are soft to make sure an already brain extracted volume is not impacted - by the bet command. The goal is to clean volumes that still have portions of non-brain - structures. - --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) - - EDDY AND TOPUP OPTIONS - --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) - --readout Readout time. ($readout) - --topup_bet_f Fractional intensity threshold for bet before EDDY (generate brain mask). - ($topup_bet_f) - --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) - --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) - - NORMALIZATION OPTIONS - --fa_mask_threshold Threshold to use when creating the fa mask for normalization. ($fa_mask_threshold) - - RESAMPLE OPTIONS - --anat_resolution Resampling resolution of the T2w image. 
($anat_resolution) - --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) - --mask_interpolation Interpolation method to use on the anatomical masks after resampling. ($mask_interpolation) - --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) - --dwi_interpolation Interpolation method to use after resampling of the dwi volume. ($dwi_interpolation) - - DTI OPTIONS - --max_dti_shell_value Maximum b-value threshold to select DTI shells. (b <= $max_dti_shell_value) - This is the default behavior unless --dti_shells is specified. - --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). - They need to be supplied between quotes e.g. (--dti_shells "0 1000"). - If supplied, will overwrite --max_dti_shell_value. - - SH OPTIONS - --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and output the SH coefficients - in a Nifti file. ($sh_fitting) - --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even number). ($sh_fitting_order) - Rules : --sh_fitting_order=8 for 45 directions - --sh_fitting_order=6 for 28 directions - --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. ($sh_fitting_basis) - --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is used. - They need to be supplied between quotes e.g. (--sh_fitting_shells "0 1500"). - NOTE: SH fitting works only on single shell. The b0 shell has to be included. - - FODF OPTIONS - --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell (b >= $min_fodf_shell_value) - This is the default behavior unless --fodf_shells is provided. - --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). - They need to be supplied between quotes e.g. (--fodf_shells "0 1500") - If supplied, will overwrite --min_fodf_shell_value. 
- --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. ($max_fa_in_ventricle) - --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. ($min_md_in_ventricle) - --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) - --basis fODF basis [descoteaux07, tournier07]. ($basis) - --sh_order Sperical Harmonics order ($sh_order) - Rules : --sh_fitting_order=8 for 45 directions - --sh_fitting_order=6 for 28 directions - - FRF OPTIONS - --mean_frf Mean the FRF of all subjects. ($mean_frf) - USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER - AND HAVE THE SAME ACQUISITION. - --fa Initial FA threshold to compute the frf. ($fa) - --min_fa Minimum FA threshold to compute the frf. ($min_fa) - --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) - --roi_radius Region of interest radius to compute the frf. ($roi_radius) - --set_frf If selected, will manually set the frf. ($set_frf) - --manual_frf FRF set manually (--manual_frf "$manual_frf") - - LOCAL SEEDING AND TRAKING OPTIONS - --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) - --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) - --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. ($local_fa_seeding_mask_thr) - --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) - --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. ($local_fa_tracking_mask_thr) - --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) - --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant - voxel in fa maps. ($local_erosion) - --local_algo Tracking algorithm [prob, det]. ($local_algo) - --local_nb_seeds Number of seeds related to the seeding type param. 
($local_nb_seeds) - --local_seeding Seeding type [npv, nt]. ($local_seeding) - --local_step_size Step size ($local_step_size) - --local_theta Maximum angle between 2 steps. ($local_theta) - --local_min_len Minimum length for a streamline. ($local_min_len) - --local_max_len Maximum length for a streamline. ($local_max_len) - --local_compress_value Compression error threshold. ($local_compress_value) - --local_tracking_seed List of random seed numbers for the random number generator. ($local_tracking_seed) - Please write them as a list separated by commas without space e.g. (--tracking_seed 1,2,3) - - PFT SEEDING AND TRAKING OPTIONS - --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) - --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) - --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. ($pft_fa_seeding_mask_thr) - --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) - --pft_algo Tracking algorithm [prob, det]. ($pft_algo) - --pft_nb_seeds Number of seeds related to the seeding type param. ($pft_nb_seeds) - --pft_seeding Seeding type [npv, nt]. ($pft_seeding) - --pft_step_size Step size ($pft_step_size) - --pft_theta Maximum angle between 2 steps. ($pft_theta) - --pft_min_len Minimum length for a streamline. ($pft_min_len) - --pft_max_len Maximum length for a streamline. ($pft_max_len) - --pft_compress_value Compression error threshold. ($pft_compress_value) - --pft_random_seed List of random seed numbers for the random number generator. ($pft_random_seed) - Please write them as a list separated by commas without space e.g. (--tracking_seed 1,2,3) - - PROCESSES OPTIONS - --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) - --processes_eddy Number of processes for EDDY task. ($processes_eddy) - --processes_registration Number of processes for registration task. 
($processes_registration) - --processes_fodf Number of processes for fODF task. ($processes_fodf) - -[CONNECTOMICS OPTIONS] - - DECOMPOSE OPTIONS - --no_pruning If set, will not prune on length ($no_pruning) - --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) - --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) - --min_length Pruning minimal segment length ($min_length) - --max_length Pruning maximal segment length ($max_length) - --loop_max_angle Maximal winding angle over which a streamline is considered as looping - ($loop_max_angle) - --outlier_threshold Outlier removal threshold when using hierarchical QB ($outlier_threshold) - - COMMIT OPTIONS - --nbr_dir Number of directions, (half sphere), representing the possible orientations of the - response functions ($nbr_dir) - --para_diff Parallel diffusivity in mm^2/s ($para_diff) - --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) - - PROCESSES OPTIONS - --processes_commit Number of processes for COMMIT task ($processes_commit) - --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) - --processes_connectivity Number of processes for connectivity task ($processes_connectivity) - -[GLOBAL OPTIONS] - - OUTPUT OPTIONS - --output_dir Directory to write the final results. Default is "./Results_Infant_Tracking/". - -AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) - -no_symlink When used, results will be directly copied in the output folder and symlink will not - be used. - -macos When used, the scratch folder will be modified for MacOS users. - -tracking When used, will perform the tracking pipeline to generate the whole-brain - tractogram from raw diffusion images. - -connectomics When used, will perform connectivity analysis between atlas-based segmentation. - -NOTES - -The 'scilpy/scripts' folder should be in your PATH environment variable. 
Not necessary if the -Singularity container is used. - -The intermediate working directory is, by default, set to './work'. -To change it, use the '-w WORK_DIR' argument. - -The default config file is tractoflow/nextflow.config. -Use '-C config_file.config' to specify a non-default configuration file. -The '-C config_file.config' must be inserted after the nextflow call -like 'nextflow -C config_file.config run ...'. \ No newline at end of file +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics \ No newline at end of file diff --git a/main.nf b/main.nf index 04404f3..e7280c4 100644 --- a/main.nf +++ b/main.nf @@ -7,64 +7,97 @@ params.help = false // Importing modules and processes include { fetch_id; get_data_tracking; - get_data_connectomics } from "./modules/io.nf" -include { PREPROCESSING } from "./modules/tracking/workflows/preprocessing.nf" + get_data_connectomics; + get_data_template } from "./modules/io.nf" +include { DWI; + ANAT } from "./modules/tracking/workflows/preprocessing.nf" include { DTI } from "./modules/tracking/workflows/DTI.nf" include { SH } from "./modules/tracking/workflows/SH.nf" include { REGISTRATION } from "./modules/tracking/workflows/registration.nf" include { FODF } from "./modules/tracking/workflows/FODF.nf" include { TRACKING } from "./modules/tracking/workflows/tracking.nf" include { CONNECTOMICS } from "./modules/connectomics/workflows/connectomics.nf" +include { POPULATION_TEMPLATE } from "./modules/template/workflows/pop_template.nf" workflow { - if (params.help) display_usage() + if (params.help) { display_usage() } else { display_run_info() + if ( params.template_config ) { + data = get_data_template() + + POPULATION_TEMPLATE(data.anat, + data.dwi, + data.fa, + data.anat_ref, + data.fa_ref) + } + if ( params.run_tracking ) { data = get_data_tracking() - PREPROCESSING(data.dwi, - data.rev, - data.anat, - data.wm_mask) + // ** Merging mask and anat if -profile infant. 
** // + if ( params.infant_config ) { + anat_channel = data.anat + .combine(data.wm_mask, by: 0) + } + else { + anat_channel = data.anat + } + // ** Anatomical preprocessing ** // + ANAT(anat_channel) + + // ** DWI preprocessing ** // + DWI(data.dwi, + data.rev) - DTI(PREPROCESSING.out.dwi_bval_bvec, - PREPROCESSING.out.b0_and_mask) + // ** DTI modelling ** // + DTI(DWI.out.dwi_bval_bvec, + DWI.out.b0_and_mask) - if(params.sh_fitting) { - SH(PREPROCESSING.out.dwi_bval_bvec) + // ** SH fitting if set ** // + if ( params.sh_fitting ) { + SH(DWI.out.dwi_bval_bvec) } + // ** Registration of anatomical volume on diffusion volumes. ** // REGISTRATION(DTI.out.fa_and_md, - PREPROCESSING.out.t2w_and_mask, - PREPROCESSING.out.b0_and_mask.map{ [it[0], it[1]] }) + ANAT.out.anat_and_mask, + DWI.out.b0_and_mask.map{ [it[0], it[1]] }) - b0_mask_channel = PREPROCESSING.out.b0_and_mask - .map{[it[0], it[2]]} + // ** Extracting b0 ** // + b0_mask_channel = DWI.out.b0_and_mask + .map{[it[0], it[2]]} - FODF(PREPROCESSING.out.dwi_bval_bvec, + // ** Modelling FODF ** // + FODF(DWI.out.dwi_bval_bvec, b0_mask_channel, DTI.out.fa_and_md) + // ** FA channel for tracking maps ** // fa_channel = DTI.out.fa_and_md .map{[it[0], it[1]]} + // ** Tracking ** // TRACKING(REGISTRATION.out.warped_anat, FODF.out.fodf, fa_channel) } if ( params.run_connectomics && params.run_tracking ) { + // ** Fetch tracking data ** // tracking = TRACKING.out.trk - // ** Labels needs to be provided as an input, since they are not computed at some point in the pipeline ** // + // ** Labels needs to be provided as an input, since they are not computed at ** // + // ** some point in the pipeline ** // input = file(params.input) labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - dwi_peaks = PREPROCESSING.out.dwi_bval_bvec - .combine(FODF.out.peaks, by: 0) + // ** Preparing metrics channel ** // + dwi_peaks = DWI.out.dwi_bval_bvec + .combine(FODF.out.peaks, by: 
0) fodf = FODF.out.fodf def_metrics = DTI.out.fa_and_md @@ -86,18 +119,21 @@ workflow { // ** Flattening metrics channel ** // metrics_flat = def_metrics.groupTuple() + // ** Fetching anat ** // t2w = REGISTRATION.out.warped_anat .map{ [it[0], it[1]] } + // ** Fetching transformation files ** // transfos = REGISTRATION.out.transfos + // ** Launching connectomics workflow ** // CONNECTOMICS(tracking, - labels, - dwi_peaks, - fodf, - metrics_flat, - t2w, - transfos) + labels, + dwi_peaks, + fodf, + metrics_flat, + t2w, + transfos) } if ( params.run_connectomics && !params.run_tracking ) { @@ -106,12 +142,12 @@ workflow { metrics = data.metrics.transpose().groupTuple() CONNECTOMICS(data.trk, - data.labels, - data.dwi_peaks, - data.fodf, - metrics, - data.t2w, - data.transfos) + data.labels, + data.dwi_peaks, + data.fodf, + metrics, + data.t2w, + data.transfos) } } } @@ -125,7 +161,21 @@ if (!params.help) { } def display_usage () { - usage = file("$projectDir/USAGE") + if (params.run_tracking && !params.infant_config) { + usage = file("$projectDir/modules/tracking/USAGE") + } + else if (params.run_tracking && params.infant_config) { + usage = file("$projectDir/modules/tracking/USAGE_INFANT") + } + else if (params.run_connectomics && !params.infant_config) { + usage = file("$projectDir/modules/connectomics/USAGE") + } + else if (params.run_connectomics && params.infant_config) { + usage = file("$projectDir/modules/connectomics/USAGE_INFANT") + } + else { + usage = file("$projectDir/USAGE") + } cpu_count = Runtime.runtime.availableProcessors() bindings = ["b0_thr":"$params.b0_thr", @@ -229,8 +279,10 @@ def display_usage () { "processes_commit":"$params.processes_commit", "processes_afd_fixel":"$params.processes_afd_fixel", "processes_connectivity":"$params.processes_connectivity", + "references":"$params.references", "run_tracking":"$params.run_tracking", - "run_connectomics":"$params.run_connectomics" + "run_connectomics":"$params.run_connectomics", + 
"template_config":"$params.template_config" ] engine = new groovy.text.SimpleTemplateEngine() diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE new file mode 100644 index 0000000..12b174f --- /dev/null +++ b/modules/connectomics/USAGE @@ -0,0 +1,111 @@ + +DWI Pipeline +======================== + +Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. + +The connectomics processes are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. + + +Run Connectomics Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | |-- *t2w_warped.nii.gz + | ├-- *.trk + | ├-- *labels.nii.gz + | ├-- *peaks.nii.gz + | ├-- *fodf.nii.gz + | ├-- OGenericAffine.mat + | ├-- output1Warp.nii.gz + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + |-- *t2w_warped.nii.gz + ├-- *.trk + ├-- *labels.nii.gz + ├-- *peaks.nii.gz + ├-- *fodf.nii.gz + ├-- OGenericAffine.mat + ├-- output1Warp.nii.gz + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over 
which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --b_thr Tolerance value to considier bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. ($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_Infant_Tracking/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. 
+ +The default config file is tractoflow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/connectomics/USAGE_INFANT b/modules/connectomics/USAGE_INFANT new file mode 100644 index 0000000..70e577f --- /dev/null +++ b/modules/connectomics/USAGE_INFANT @@ -0,0 +1,111 @@ + +DWI Pipeline +======================== + +Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. + +The connectomics processes are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. + + +Run Connectomics Pipeline Infant Config + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics,infant + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | |-- *t2w_warped.nii.gz + | ├-- *.trk + | ├-- *labels.nii.gz + | ├-- *peaks.nii.gz + | ├-- *fodf.nii.gz + | ├-- OGenericAffine.mat + | ├-- output1Warp.nii.gz + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + |-- *t2w_warped.nii.gz + ├-- *.trk + ├-- *labels.nii.gz + ├-- *peaks.nii.gz + ├-- *fodf.nii.gz + ├-- OGenericAffine.mat + ├-- output1Warp.nii.gz + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, 
will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --b_thr Tolerance value to considier bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. ($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_Infant_Tracking/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. 
+ +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is tractoflow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index e8d0688..1c73aeb 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -8,7 +8,7 @@ process COMMIT { label "COMMIT" input: - tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) + tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks) output: tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit tuple val(sid), path("${sid}__results_bzs/") @@ -26,7 +26,7 @@ process COMMIT { } if ( params.use_commit2 ) { """ - scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 --in_peaks $peaks\ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 --in_peaks $peaks\ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff mv "${sid}__results_bzs/commit_2/decompose_commit.h5" "./${sid}__decompose_commit.h5" @@ -34,7 +34,7 @@ process COMMIT { } else { """ - scil_run_commit.py $trk_h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ --para_diff $params.para_diff $perp_diff_arg --iso_diff 
$params.iso_diff mv "${sid}__results_bzs/commit_1/decompose_commit.h5" "./${sid}__decompose_commit.h5" diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index 528c398..8add5b8 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -22,49 +22,56 @@ workflow CONNECTOMICS { main: + // ** Transforming labels to diff space ** // channel_for_transfo = labels_channel .combine(t2w_channel, by: 0) .combine(transfos_channel, by: 0) - TRANSFORM_LABELS(channel_for_transfo) + // ** If -profile infant is used, first part will be run. COMMIT1 is the only supported ** // + // ** method as of now, since running commit2 requires a decomposition first, which is not an ** // + // ** easy task on infant data. This will be improved in the future. ** // if ( params.infant_config ) { + + // ** COMMIT1 processing on trk ** // commit_channel = tracking_channel .combine(dwi_peaks_channel, by: 0) - COMMIT_ON_TRK(commit_channel) + // ** Decomposing tractogram ** // decompose_channel = COMMIT_ON_TRK.out.trk_commit .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) - DECOMPOSE_CONNECTIVITY(decompose_channel) + // ** Setting output channel ** // afd_fixel_channel = DECOMPOSE_CONNECTIVITY.out.decompose .combine(fodf_channel, by: 0) } else { + // ** Decomposing tractogram ** // decompose_channel = tracking_channel .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) - DECOMPOSE_CONNECTIVITY(decompose_channel) + // ** Running COMMIT1 or COMMIT2 ** // commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose .combine(dwi_peaks_channel, by: 0) - COMMIT(commit_channel) + // ** Setting output channel ** // afd_fixel_channel = COMMIT.out.h5_commit .combine(fodf_channel, by: 0) } + // ** Computing AFD fixel ** // COMPUTE_AFD_FIXEL(afd_fixel_channel) + // ** Computing Connectivity ** // compute_metrics_channel = COMPUTE_AFD_FIXEL.out.decompose_afd .combine(TRANSFORM_LABELS.out.labels_warped, 
by: 0) .combine(metrics_channel, by: 0) - COMPUTE_CONNECTIVITY(compute_metrics_channel) + // ** Visualizing Connectivity ** // VISUALIZE_CONNECTIVITY(COMPUTE_CONNECTIVITY.out.metrics) - } \ No newline at end of file diff --git a/modules/io.nf b/modules/io.nf index 85ad887..e5a256c 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -3,6 +3,7 @@ nextflow.enable.dsl=2 params.input = false +params.references = false def fetch_id ( dir, dir_base ) { return dir_base.relativize(dir) @@ -21,8 +22,8 @@ workflow get_data_tracking { log.info " [Input]" log.info " ├-- S1" log.info " | ├-- *dwi.nii.gz" - log.info " | ├-- *bval" - log.info " | ├-- *bvec" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" log.info " | ├-- *revb0.nii.gz" log.info " | ├-- *t2w.nii.gz" log.info " | └-- *wm_mask.nii.gz" @@ -67,8 +68,8 @@ workflow get_data_connectomics { log.info " [Input]" log.info " ├-- S1" log.info " | ├-- *dwi.nii.gz" - log.info " | ├-- *bval" - log.info " | ├-- *bvec" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" log.info " | ├-- *t2w.nii.gz" log.info " | ├-- *.trk" log.info " | ├-- *labels.nii.gz" @@ -123,4 +124,51 @@ workflow get_data_connectomics { metrics = metrics_channel t2w = t2w_channel transfos = transfos_channel +} + +workflow get_data_template { + main: + if ( !params.input ) { + log.info "You must provide an input folder containing all images using:" + log.info " --input=/path/to/[input_folder] Input folder containing multiple subjects for tracking" + log.info "" + log.info " [Input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *fa.nii.gz" + log.info " | ├-- *t2w.nii.gz" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *dwi.bvec" + log.info " ├-- *fa.nii.gz" + log.info " └-- *t2w.nii.gz" + log.info " [References]" + log.info " ├-- *fa_ref.nii.gz" + log.info " └-- *t2w_ref.nii.gz" + error "Please resubmit your command with the previous file structure." 
+ } + + input = file(params.input) + references = file(params.references) + + // Loading all files. + dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bvec}", size: 2, flat: true) + { fetch_id(it.parent, input) } + fa_channel = Channel.fromFilePairs("$input/**/*fa.nii.gz", size:1, flat: true) + { fetch_id(it.parent, input) } + anat_channel = Channel.fromFilePairs("$input/**/*t2w.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + anat_ref = Channel.fromPath("$references/*t2w_ref.nii.gz") + fa_ref = Channel.fromPath("$references/*fa_ref.nii.gz") + + // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. + dwi_channel = dwi_channel.map{sid, bvecs, dwi -> tuple(sid, dwi, bvecs)} + + emit: + dwi = dwi_channel + anat = anat_channel + fa = fa_channel + anat_ref = anat_ref + fa_ref = fa_ref } \ No newline at end of file diff --git a/modules/template/processes/average.nf b/modules/template/processes/average.nf new file mode 100644 index 0000000..8d4c64f --- /dev/null +++ b/modules/template/processes/average.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process AVERAGE_VOLUMES_ANAT { + label "AVERAGE_VOLUMES_ANAT" + cpus 4 + publishDir = params.Pop_Avg_Publish_Dir + + input: + path(volumes) + output: + tuple path("population_avg_anat.nii.gz"), + path("population_avg_anat_bet.nii.gz"), + path("population_avg_anat_bet_mask.nii.gz"), emit: popavg + script: + """ + scil_image_math.py mean $volumes population_avg_anat.nii.gz + bet population_avg_anat.nii.gz temp.nii.gz -f 0.7 -R + bet temp.nii.gz population_avg_anat_bet -m -R + """ +} + +process AVERAGE_DWI { + label "AVERAGE_DWI" + cpus 4 + publishDir = params.Pop_Avg_Publish_Dir + + input: + path(volumes) + output: + path("population_avg_dwi.nii.gz"), emit: dwipopavg + + script: + """ + mrmath $volumes mean population_avg_dwi.nii.gz + """ +} \ No newline at end of file diff --git a/modules/template/processes/registration.nf 
b/modules/template/processes/registration.nf new file mode 100644 index 0000000..2f536ff --- /dev/null +++ b/modules/template/processes/registration.nf @@ -0,0 +1,102 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process REGISTER_POP { + label "REGISTRATION_POP" + cpus params.processes_registration + + input: + tuple val(sid), path(anat), path(ref) + output: + tuple val(sid), path("${sid}__t2w_warped.nii.gz"), emit: warped_anat + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsRegistration --dimensionality 3 --float 0 \ + --collapse-output-transforms 1 \ + --output [ output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ + --interpolation Linear --use-histogram-matching 0 \ + --winsorize-image-intensities [ 0.005,0.995 ] \ + --initial-moving-transform [ $ref,$anat,1 ] \ + --transform Rigid[ 0.1 ] \ + --metric MI[ $ref,$anat,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform Affine[ 0.1 ] --metric MI[ $ref,$anat,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform SyN[ 0.1,3,0 ] \ + --metric CC[ $ref,$anat,1,4 ] \ + --convergence [ 200x150x200x200,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox + mv outputWarped.nii.gz ${sid}__t2w_warped.nii.gz + """ +} + +process REGISTER_FA { + label "REGISTRATION_FA" + cpus params.processes_registration + + input: + tuple val(sid), path(moving), path(ref) + output: + tuple val(sid), path("${sid}__fa_warped.nii.gz"), emit: fa_warped + tuple val(sid), + path("output0GenericAffine.mat"), + path("output1Warp.nii.gz"), emit: transfos + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + 
antsRegistration --dimensionality 3 --float 0 \ + --collapse-output-transforms 1 \ + --output [ output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ + --interpolation Linear --use-histogram-matching 0 \ + --winsorize-image-intensities [ 0.005,0.995 ] \ + --initial-moving-transform [ $ref,$moving,1 ] \ + --transform Rigid[ 0.1 ] \ + --metric MI[ $ref,$moving,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform Affine[ 0.1 ] --metric MI[ $ref,$moving,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform SyN[ 0.1,3,0 ] \ + --metric CC[ $ref,$moving,1,4 ] \ + --convergence [ 200x150x200x200,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox + mv outputWarped.nii.gz ${sid}__fa_warped.nii.gz + """ +} + +process APPLY_TRANSFORM_DWI_BVECS { + label "APPLY_TRANSFORM" + cpus 1 + + input: + tuple val(sid), path(warped_fa), path(dwi), path(bvec), path(mat), path(warp) + output: + tuple val(sid), path("${sid}__warped_dwi.nii.gz"), emit: dwi_warped + tuple val(sid), path("${sid}__warped_dwi.bvec"), emit: bvec_warped + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsApplyTransforms -d 3 -e 3 \ + -i $dwi -r $warped_fa \ + -n Linear \ + -t $warp $mat \ + -o ${sid}__warped_dwi.nii.gz + scil_image_math.py convert ${sid}__warped_dwi.nii.gz ${sid}__warped_dwi.nii.gz --data_type float32 -f + scil_apply_transform_to_bvecs.py $bvec $mat ${sid}__warped_dwi.bvec + """ +} \ No newline at end of file diff --git a/modules/template/workflows/pop_template.nf b/modules/template/workflows/pop_template.nf new file mode 100644 index 0000000..6184277 --- /dev/null +++ b/modules/template/workflows/pop_template.nf @@ -0,0 +1,59 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +include { + 
BET_T2 +} from '../../tracking/processes/preprocess.nf' + +include { + REGISTER_POP; + REGISTER_FA; + APPLY_TRANSFORM_DWI_BVECS +} from '../processes/registration.nf' + +include { + AVERAGE_VOLUMES_ANAT; + AVERAGE_DWI +} from '../processes/average.nf' + +workflow POPULATION_TEMPLATE { + take: + anat_channel + dwi_channel + fa_channel + anat_ref_channel + fa_ref_channel + main: + + BET_T2(anat_channel) + + reg_channel = BET_T2.out.bet_t2 + .combine(anat_ref_channel) + + REGISTER_POP(reg_channel) + + all_anats = REGISTER_POP.out.warped_anat + .map{ [it[1]] } + .collect() + + AVERAGE_VOLUMES_ANAT(all_anats) + + reg_fa_channel = fa_channel + .combine(fa_ref_channel) + + REGISTER_FA(reg_fa_channel) + + apply_transfo_channel = REGISTER_FA.out.fa_warped + .combine(dwi_channel, by: 0) + .combine(REGISTER_FA.out.transfos, by: 0) + + APPLY_TRANSFORM_DWI_BVECS(apply_transfo_channel) + + all_dwis = APPLY_TRANSFORM_DWI_BVECS.out.dwi_warped + .map{ [it[1]] } + .collect() + + AVERAGE_DWI(all_dwis) + +} \ No newline at end of file diff --git a/modules/tracking/USAGE b/modules/tracking/USAGE new file mode 100644 index 0000000..e6c6a0d --- /dev/null +++ b/modules/tracking/USAGE @@ -0,0 +1,221 @@ + +DWI Pipeline +======================== + +Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. + +It is possible to also run a connectivity analysis following tracking. Using -profile connectomics, +the pipeline will perform connectivity analysis based on atlas segmentation. The connectomics processes +are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + +Both analysis (tracking and connectomics) can be performed one after another automatically (using +-profile tracking,connectomics). The pipeline will then reorganised channel to provide the correct inputs. 
+ +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., +TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline +leveraging Nextflow & Singularity, NeuroImage, +https://doi.org/10.1016/j.neuroimage.2020.116889. + +Run Tracking Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | ├-- *t1w.nii.gz + | ├-- *labels.nii.gz [Only if connectomics is also selected] + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also + | selected] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t1w.nii.gz + ├-- *labels.nii.gz [Only if connectomics is also selected] + └-- metrics + └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also + selected] + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET ANAT OPTIONS + --run_bet_anat If set, will perform brain extraction on the input anat volume. + ($run_bet_anat) + Default settings are soft to make sure an already brain extracted volume + is not impacted + by the bet command. The goal is to clean volumes that still have + portions of non-brain structures. + --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. 
($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). 
NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. 
($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nb_seeds Number of seeds related to the seeding type param. ($local_nb_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nb_seeds Number of seeds related to the seeding type param. ($pft_nb_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. 
($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_Infant_Tracking/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is tractoflow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/tracking/USAGE_INFANT b/modules/tracking/USAGE_INFANT new file mode 100644 index 0000000..2472144 --- /dev/null +++ b/modules/tracking/USAGE_INFANT @@ -0,0 +1,223 @@ + +DWI Pipeline +======================== + +Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. + +It is possible to also run a connectivity analysis following tracking. Using -profile connectomics, +the pipeline will perform connectivity analysis based on atlas segmentation. The connectomics processes +are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + +Both analysis (tracking and connectomics) can be performed one after another automatically (using +-profile tracking,connectomics). The pipeline will then reorganised channel to provide the correct inputs. + +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., +TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline +leveraging Nextflow & Singularity, NeuroImage, +https://doi.org/10.1016/j.neuroimage.2020.116889. 
+ +Run Tracking Pipeline Infant Config + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,infant + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | ├-- *t2w.nii.gz + | ├-- *wm_mask.nii.gz + | ├-- *labels.nii.gz [Only if connectomics is also selected] + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also + | selected] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t2w.nii.gz + ├-- *wm_mask.nii.gz + ├-- *labels.nii.gz [Only if connectomics is also selected] + └-- metrics + └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also + selected] + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET ANAT OPTIONS + --run_bet_anat If set, will perform brain extraction on the input anat volume. + ($run_bet_anat) + Default settings are soft to make sure an already brain extracted volume + is not impacted + by the bet command. The goal is to clean volumes that still have + portions of non-brain structures. + --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. 
($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. 
+ + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. 
+ ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nb_seeds Number of seeds related to the seeding type param. ($local_nb_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nb_seeds Number of seeds related to the seeding type param. ($pft_nb_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. 
+ ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_Infant_Tracking/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is tractoflow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 46f63a1..1473fb0 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -11,20 +11,23 @@ process BET_DWI { output: tuple val(sid), path("${sid}__dwi_bet.nii.gz"), emit: bet_dwi script: + // ** Using a combination of preliminary bet, powder average computation and then final bet. ** // + // ** This might not be necessary for good quality data, but returns much more robust results on ** // + // ** infant data. ** // """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_extract_b0.py $dwi $bval $bvec ${sid}__b0_mean.nii.gz\ --b0_thr $params.b0_thr --force_b0_threshold --mean - bet2 ${sid}__b0_mean.nii.gz ${sid}__b0_bet -f $params.initial_bet_f -m + bet ${sid}__b0_mean.nii.gz ${sid}__b0_bet -f $params.initial_bet_f -m -R scil_image_math.py convert ${sid}__b0_bet_mask.nii.gz ${sid}__b0_bet_mask.nii.gz\ --data_type uint8 -f mrcalc $dwi ${sid}__b0_bet_mask.nii.gz -mult ${sid}__dwi_bet_prelim.nii.gz\ -quiet -force -nthreads 1 scil_compute_powder_average.py ${sid}__dwi_bet_prelim.nii.gz $bval\ ${sid}__powder_avg.nii.gz --b0_thr $params.b0_thr -f - bet2 ${sid}__powder_avg.nii.gz ${sid}__powder_avg_bet -m -f $params.final_bet_f + bet ${sid}__powder_avg.nii.gz ${sid}__powder_avg_bet -m -R -f $params.final_bet_f scil_image_math.py convert ${sid}__powder_avg_bet_mask.nii.gz ${sid}__powder_avg_bet_mask.nii.gz\ --data_type uint8 -f mrcalc $dwi ${sid}__powder_avg_bet_mask.nii.gz -mult ${sid}__dwi_bet.nii.gz\ @@ -39,13 +42,15 @@ process BET_T2 { input: tuple val(sid), path(anat) output: - tuple val(sid), path("${sid}__t2w_bet.nii.gz"), emit: bet_t2 + tuple val(sid), path("${sid}__t2w_bet.nii.gz"), emit: t2_bet + when: + params.infant_config script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 
- bet2 $anat ${sid}__t2w_bet.nii.gz -f $params.bet_anat_f + bet $anat ${sid}__t2w_bet.nii.gz -f $params.bet_anat_f -R """ } @@ -230,9 +235,11 @@ process CROP_ANAT { cpus 1 input: - tuple val(sid), path(t2w), path(wm_mask) + tuple val(sid), path(t2w), path(mask) output: - tuple val(sid), path("${sid}__t2w_cropped.nii.gz"), path("${sid}__wm_mask_cropped.nii.gz"), emit: cropped_t2w_and_mask + tuple val(sid), + path("${sid}__t2w_cropped.nii.gz"), + path("${sid}__mask_cropped.nii.gz"), emit: cropped_anat_and_mask script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -240,7 +247,7 @@ process CROP_ANAT { export OPENBLAS_NUM_THREADS=1 scil_crop_volume.py $t2w ${sid}__t2w_cropped.nii.gz\ --output_bbox t2w_boundingBox.pkl -f - scil_crop_volume.py $wm_mask ${sid}__wm_mask_cropped.nii.gz\ + scil_crop_volume.py $mask ${sid}__mask_cropped.nii.gz\ --input_bbox t2w_boundingBox.pkl -f """ } @@ -274,7 +281,8 @@ process BET_T1 { input: tuple val(sid), path(t1) output: - tuple val(sid), path("${sid}__t1_bet.nii.gz"), + tuple val(sid), + path("${sid}__t1_bet.nii.gz"), path("${sid}__t1_bet_mask.nii.gz"), emit: t1_and_mask_bet when: !params.infant_config @@ -301,6 +309,8 @@ process RESAMPLE_ANAT { tuple val(sid), path(t2w), path(mask) output: tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__mask_resampled.nii.gz"), emit: t2w_and_mask + when: + params.infant_config script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -345,7 +355,7 @@ process NORMALIZE { export OPENBLAS_NUM_THREADS=1 shells=\$(awk -v max="$params.max_dti_shell_value" '{for (i = 1; i <= NF; i++) {v = int(\$i);if (v <= max) shells[v] = 1;}}END {for (v in shells) print v;}' "$bval" |\ - sort -n | tr '\n' ' ') + sort -n | tr '\n' ' ') scil_extract_dwi_shell.py $dwi $bval $bvec \$shells\ dwi_dti.nii.gz bval_dti bvec_dti -t $params.dwi_shell_tolerance diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 5f7e48e..1f28903 100644 --- 
a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -5,89 +5,116 @@ nextflow.enable.dsl=2 include { BET_DWI; BET_T2; + BET_T1; DENOISING; + DENOISE_T1; TOPUP; EDDY_TOPUP; N4; + N4_T1; CROP_DWI; CROP_ANAT; RESAMPLE_ANAT; + RESAMPLE_T1; NORMALIZE; RESAMPLE_DWI; EXTRACT_B0 } from '../processes/preprocess.nf' -workflow PREPROCESSING { +workflow DWI { take: dwi_channel rev_channel - anat_channel - wm_mask_channel main: + // ** Bet ** // BET_DWI(dwi_channel) + + // ** Denoising ** // DENOISING(BET_DWI.out) + // ** Topup ** // topup_channel = dwi_channel .map{[it[0], it[2], it[3]]} .combine(DENOISING.out, by: 0) .combine(rev_channel, by: 0) .map{ sid, bvals, bvecs, dwi, rev -> tuple(sid, dwi, bvals, bvecs, rev)} - TOPUP(topup_channel) + // ** Eddy ** // eddy_channel = dwi_channel .map{[it[0], it[2], it[3]]} .combine(DENOISING.out, by: 0) .combine(TOPUP.out.topup_result, by: 0) - .map{ sid, bvals, bvecs, dwi, corrected_b0s, field, movpar -> tuple(sid, dwi, bvals, bvecs, corrected_b0s, field, movpar)} - + .map{ sid, bvals, bvecs, dwi, corrected_b0s, field, movpar -> tuple(sid, dwi, bvals, bvecs, corrected_b0s, + field, movpar)} EDDY_TOPUP(eddy_channel) + // ** N4 ** // n4_channel = EDDY_TOPUP.out.dwi_bval_bvec .combine(EDDY_TOPUP.out.b0_mask, by: 0) - N4(n4_channel) + // ** Crop ** // dwi_crop_channel = N4.out - .join(EDDY_TOPUP.out.b0_mask) - + .combine(EDDY_TOPUP.out.b0_mask, by: 0) CROP_DWI(dwi_crop_channel) - anat_crop_channel = anat_channel - - if (params.run_bet_anat) { - BET_T2(anat_channel) - anat_crop_channel = BET_T2.out.bet_t2 - } - - anat_crop_channel = anat_crop_channel - .combine(wm_mask_channel, by:0) - - CROP_ANAT(anat_crop_channel) - RESAMPLE_ANAT(CROP_ANAT.out.cropped_t2w_and_mask) - + // ** Normalization ** // normalize_channel = CROP_DWI.out.dwi .combine(EDDY_TOPUP.out.dwi_bval_bvec.map{[it[0], it[2], it[3]]}, by: 0) .combine(CROP_DWI.out.mask, by: 0) - NORMALIZE(normalize_channel) + // ** Resampling ** // 
resample_dwi_channel = NORMALIZE.out.dwi_normalized .combine(CROP_DWI.out.mask, by: 0) - RESAMPLE_DWI(resample_dwi_channel) + // ** Extracting b0 ** // extract_b0_channel = EDDY_TOPUP.out.dwi_bval_bvec .map{[it[0], it[2], it[3]]} .combine(RESAMPLE_DWI.out.dwi_resampled, by: 0) .map{ sid, bval, bvec, dwi -> tuple(sid, dwi, bval, bvec)} - EXTRACT_B0(extract_b0_channel) emit: dwi_bval_bvec = extract_b0_channel b0_and_mask = EXTRACT_B0.out.b0_and_mask - t2w_and_mask = RESAMPLE_ANAT.out.t2w_and_mask -} \ No newline at end of file +} + +workflow ANAT { + take: + anat_channel + + main: + // ** Denoising ** // + DENOISE_T1(anat_channel) + + // ** N4 ** // + N4_T1(DENOISE_T1.out.t1_denoised) + + // ** Resampling ** // + RESAMPLE_T1(N4_T1.out.t1_n4) + // ** Resample if -profile infant ** // + RESAMPLE_ANAT(anat_channel) + + // ** Bet ** // + BET_T1(RESAMPLE_T1.out.t1_resampled) + // ** Bet if -profile infant ** // + BET_T2(RESAMPLE_ANAT.out.t2w_and_mask.map{ [it[0], it[1]] }) + + // ** Crop ** // + if ( params.infant_config ) { + crop_channel = BET_T2.out.t2_bet + .combine(RESAMPLE_ANAT.out.t2w_and_mask.map{ [it[0], it[2]] }, by: 0) + CROP_ANAT(crop_channel) + } + else { + CROP_ANAT(BET_T1.out.t1_and_mask_bet) + } + + emit: + anat_and_mask = CROP_ANAT.out.cropped_anat_and_mask +} diff --git a/modules/tracking/workflows/registration.nf b/modules/tracking/workflows/registration.nf index e7b21d5..1e060ec 100644 --- a/modules/tracking/workflows/registration.nf +++ b/modules/tracking/workflows/registration.nf @@ -14,19 +14,20 @@ workflow REGISTRATION { b0_channel main: + // ** If -profile infant is selected, will do registration from t2w on MD. ** // t2_reg_channel = fa_md_channel .map{ [it[0], it[2]] } .combine(anat_and_mask, by: 0) - REGISTER_T2(t2_reg_channel) + // ** Classical registration from t1w to b0/FA. 
** // t1_reg_channel = fa_md_channel .map{ [it[0], it[1]] } .combine(anat_and_mask, by: 0) .combine(b0_channel, by: 0) - REGISTER_T1(t1_reg_channel) + // ** Organising channel for output. ** // if ( params.infant_config ) { warped_anat = REGISTER_T2.out.warped_anat transfos = REGISTER_T2.out.transfos diff --git a/modules/tracking/workflows/tracking.nf b/modules/tracking/workflows/tracking.nf index d910494..9dfd2c2 100644 --- a/modules/tracking/workflows/tracking.nf +++ b/modules/tracking/workflows/tracking.nf @@ -21,51 +21,48 @@ workflow TRACKING { main: if ( params.infant_config ) { - + // ** Creating masks channel and generating seeding and tracking masks. ** // masks_channel = anat_and_mask_channel - .map{ [it[0], it[2]] } - .combine(fa_channel, by: 0) - + .map{ [it[0], it[2]] } + .combine(fa_channel, by: 0) GENERATE_MASKS(masks_channel) + // ** Performing local tracking. ** // tracking_channel = fodf_channel .combine(GENERATE_MASKS.out.masks, by: 0) - LOCAL_TRACKING(tracking_channel) - out_channel = LOCAL_TRACKING.out.tractogram - } else { - + // ** Segmenting tissues using fslfast ** // anat_channel = anat_and_mask_channel.map{ [it[0], it[1]] } - SEGMENT_TISSUES(anat_channel) + // ** If --run_local_tracking is set, this will be run ** // + // ** Generating seeding and tracking mask ** // local_masks_channel = SEGMENT_TISSUES.out.masks .map{ [it[0], it[1]] } .combine(fa_channel, by: 0) - LOCAL_TRACKING_MASK(local_masks_channel) LOCAL_SEEDING_MASK(local_masks_channel) + // ** Performing local tracking ** // local_tracking_channel = fodf_channel .combine(LOCAL_SEEDING_MASK.out.seeding_mask, by: 0) .combine(LOCAL_TRACKING_MASK.out.tracking_mask, by: 0) - LOCAL_TRACKING(local_tracking_channel) + // ** If --run_pft_tracking is set, this will be run ** // + // ** Creating PFT masks. 
** // PFT_TRACKING_MASK(SEGMENT_TISSUES.out.maps) - pft_masks_channel = SEGMENT_TISSUES.out.masks .map{ [it[0], it[1]] } .combine(fa_channel, by: 0) .combine(PFT_TRACKING_MASK.out.interface_map, by: 0) - PFT_SEEDING_MASK(pft_masks_channel) + // ** Performing PFT tracking ** // pft_tracking_channel = fodf_channel .combine(PFT_TRACKING_MASK.out.tracking_maps, by: 0) .combine(PFT_SEEDING_MASK.out.seeding_mask, by: 0) - PFT_TRACKING(pft_tracking_channel) } diff --git a/nextflow.config b/nextflow.config index a51c176..38cb395 100644 --- a/nextflow.config +++ b/nextflow.config @@ -165,13 +165,19 @@ params { run_tracking = false run_connectomics = false infant_config = false + template_config = false + + // Template Options // + references = "./references/" Mean_FRF_Publish_Dir = "./Results_Infant_Tracking/Mean_FRF" + Pop_Avg_Publish_Dir = "./Results_Infant_Tracking/Pop_Avg" } if(params.output_dir) { process.publishDir = {"$params.output_dir/$sid/$task.process"} params.Mean_FRF_Publish_Dir = "${params.output_dir}/Mean_FRF" + params.Pop_Avg_Publish_Dir = "${params.output_dir}/Pop_Avg" } if(params.processes) { @@ -242,4 +248,12 @@ profiles { params.iso_diff = "2.0E-3" } + + template { + params.template_config = true + + //** BET ANAT Options **// + params.bet_anat_f = 0.1 + + } } \ No newline at end of file From f40fd1a03dd58c710a1f86845dac9df09d396e12 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 25 Oct 2023 10:06:41 -0400 Subject: [PATCH 15/54] added CI --- .github/workflows/ci.yml | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index da3c779..ea0963c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,22 +3,13 @@ on: [push, pull_request] jobs: test: - env: - NXF_VER: ${{ matrix.nxf_ver }} - NXF_ANSI_LOG: false runs-on: ubuntu-latest - strategy: - matrix: - nxf_ver: ['22.10.6', '23.10.0'] steps: - - uses: 
actions/checkout@v2 - - name: Install Nextflow - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ + - uses: actions/checkout@v4 + - uses: nf-core/setup-nextflow@v1 - name: Pull docker image run: | - docker pull scilus/docker-tractoflow:latest + docker pull scilus/scilus:latest - name: Run pipeline run: | nextflow run ${GITHUB_WORKSPACE} --help -with-docker scilus/docker-tractoflow:latest \ No newline at end of file From 71ccc8beb4c794131180e2db240cc1046a987ed1 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 26 Oct 2023 12:00:14 -0400 Subject: [PATCH 16/54] Added freesurferflow --- .github/workflows/ci.yml | 2 +- README.md | 4 +- USAGE | 65 ++++---- main.nf | 102 ++++++++++--- modules/connectomics/USAGE | 30 ++-- modules/connectomics/USAGE_INFANT | 30 ++-- modules/freesurfer/USAGE | 108 ++++++++++++++ modules/freesurfer/processes/atlases.nf | 113 ++++++++++++++ modules/freesurfer/processes/freesurfer.nf | 18 +++ .../freesurfer/workflows/freesurferflow.nf | 139 ++++++++++++++++++ modules/io.nf | 138 ++++++++++++++++- modules/tracking/USAGE | 54 ++++--- modules/tracking/USAGE_INFANT | 35 +++-- modules/tracking/processes/preprocess.nf | 39 +++++ modules/tracking/workflows/preprocessing.nf | 42 +++++- nextflow.config | 36 ++++- 16 files changed, 835 insertions(+), 120 deletions(-) create mode 100644 modules/freesurfer/USAGE create mode 100644 modules/freesurfer/processes/atlases.nf create mode 100644 modules/freesurfer/processes/freesurfer.nf create mode 100644 modules/freesurfer/workflows/freesurferflow.nf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ea0963c..cf9cee6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ name: CI on: [push, pull_request] jobs: - test: + pipeline-compilation: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/README.md b/README.md index fececc0..d3665fa 100644 --- a/README.md 
+++ b/README.md @@ -1,5 +1,5 @@ -Infant-DWI -=============== +ChildBrainFlow Pipeline +======================= Complete pipeline to perform tractography from infant diffusion MRI data. Adapted from the SCIL TractoFlow Pipeline (https://github.com/scilus/tractoflow.git) and Connectoflow Pipeline (https://github.com/scilus/connectoflow.git). SINGULARITY diff --git a/USAGE b/USAGE index ab34601..ef497f9 100644 --- a/USAGE +++ b/USAGE @@ -1,41 +1,52 @@ -DWI Pipeline -======================== +ChildBrainFlow Pipeline +======================= -Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. -Made for use on newborn diffusion MRI data. +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -This pipeline performs tractography on newborn dMRI data using already segmented WM and brain -mask. Those mask can come from any structural segmentation pipeline (dHCP, Infant-Freesurfer, -Neocivet, etc.). It is RECOMMENDED to provide an already brain-extracted T2w volume, but if -it is not the case, please use the --run_bet_t2w option. + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) -To simply perform tractography, use -profile tracking. The pipeline will only perform the tracking -related processes. +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** -It is possible to also run a connectivity analysis following tracking. 
Using -profile connectomics, -the pipeline will perform connectivity analysis based on atlas segmentation. The connectomics processes -are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). If you are using -only the connectomics profile, you need to provide all the necessary files to transform labels, run commit, -run afd_fixel and compute metrics (see structure below). +Steps Selection +--------------- -Both analysis (tracking and connectomics) can be performed one after another automatically (using --profile tracking,connectomics). The pipeline will then reorganised channel to provide the correct inputs. +It is possible to choose which part of the pipeline will be run by using the -profile parameter. Depending +on which -profile is selected, input files will differ. To get a list of the required input files, use this +command: -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. + nextflow run ChildBrainFlow/main.nf --help -profile {desired_profile} +Available Profiles +------------------ -Run Tracking Pipeline +Here is a list of available profiles: -nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking + 1. tracking : If selected, preprocessing of DWI and Anatomical data will be performed followed by + local modelling and tractography (see [1] for details). + 2. connectomics : If selected, labels registration, tractogram segmentation and connectivity will be + performed. + 3. freesurfer : If selected, FreeSurfer Recon-all will be run on input T1s and label files will be + generated (available atlases: freesurfer, brainnetome and glasser). Only available + if T1 volume is supplied as input (therefore, not with -profile infant). + 4. 
infant : If selected, the pipeline will assume the data is from infant patients (< 2 years old) + and adapt some parameters to perform tractography and connectomics. -Run Connectomics Pipeline +Multiple profiles can be selected at the same time, the pipeline will simply organised channels to seemlessly +connect each steps. -nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics +To view the required input files, select all the profiles you want to run (ex: tracking, connectomics and infant) +and run this command : -Run Both Pipeline + nextflow run ChildBrainFlow/main.nf --help -profile tracking,connectomics,infant -nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics \ No newline at end of file +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. 
diff --git a/main.nf b/main.nf index e7280c4..bc23248 100644 --- a/main.nf +++ b/main.nf @@ -4,26 +4,35 @@ nextflow.enable.dsl=2 params.help = false -// Importing modules and processes -include { fetch_id; - get_data_tracking; - get_data_connectomics; - get_data_template } from "./modules/io.nf" -include { DWI; - ANAT } from "./modules/tracking/workflows/preprocessing.nf" -include { DTI } from "./modules/tracking/workflows/DTI.nf" -include { SH } from "./modules/tracking/workflows/SH.nf" -include { REGISTRATION } from "./modules/tracking/workflows/registration.nf" -include { FODF } from "./modules/tracking/workflows/FODF.nf" -include { TRACKING } from "./modules/tracking/workflows/tracking.nf" -include { CONNECTOMICS } from "./modules/connectomics/workflows/connectomics.nf" -include { POPULATION_TEMPLATE } from "./modules/template/workflows/pop_template.nf" +// ** Importing modules and processes ** // +include { fetch_id; + get_data_freesurfer; + get_data_tracking; + get_data_tracking_infant; + get_data_connectomics; + get_data_connectomics_infant; + get_data_template } from "./modules/io.nf" +include { DWI; + ANAT } from "./modules/tracking/workflows/preprocessing.nf" +include { DTI } from "./modules/tracking/workflows/DTI.nf" +include { SH } from "./modules/tracking/workflows/SH.nf" +include { REGISTRATION } from "./modules/tracking/workflows/registration.nf" +include { FODF } from "./modules/tracking/workflows/FODF.nf" +include { TRACKING } from "./modules/tracking/workflows/tracking.nf" +include { CONNECTOMICS } from "./modules/connectomics/workflows/connectomics.nf" +include { POPULATION_TEMPLATE } from "./modules/template/workflows/pop_template.nf" +include { FREESURFERFLOW } from "./modules/freesurfer/workflows/freesurferflow.nf" workflow { if (params.help) { display_usage() } else { display_run_info() + // ** Checking compatibility between profiles. 
** // + if ( params.infant_config && params.run_freesurfer ) { + error "Profiles infant_config and freesurfer are not compatible since infant_freesurfer is not implemented." + } + if ( params.template_config ) { data = get_data_template() @@ -34,8 +43,18 @@ workflow { data.fa_ref) } + if ( params.run_freesurfer ) { + data = get_data_freesurfer() + + FREESURFERFLOW(data.anat) + } + if ( params.run_tracking ) { - data = get_data_tracking() + if ( params.infant_config ) { + data = get_data_tracking_infant() + } else { + data = get_data_tracking() + } // ** Merging mask and anat if -profile infant. ** // if ( params.infant_config ) { @@ -89,11 +108,15 @@ workflow { // ** Fetch tracking data ** // tracking = TRACKING.out.trk - // ** Labels needs to be provided as an input, since they are not computed at ** // - // ** some point in the pipeline ** // - input = file(params.input) - labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) - { fetch_id(it.parent, input) } + // ** Fetching labels from freesurferflow if -profile freesurfer is used, if not, ** // + // ** fetching it from input files. 
** // + if ( !params.run_freesurfer ) { + input = file(params.input) + labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + } else { + labels = FREESURFERFLOW.out.labels + } // ** Preparing metrics channel ** // dwi_peaks = DWI.out.dwi_bval_bvec @@ -137,12 +160,22 @@ workflow { } if ( params.run_connectomics && !params.run_tracking ) { - data = get_data_connectomics() + if ( params.infant_config ) { + data = get_data_connectomics_infant() + } else { + data = get_data_connectomics() + } + + if ( params.run_freesurfer ) { + labels = FREESURFERFLOW.out.labels + } else { + labels = data.labels + } metrics = data.metrics.transpose().groupTuple() CONNECTOMICS(data.trk, - data.labels, + labels, data.dwi_peaks, data.fodf, metrics, @@ -161,6 +194,7 @@ if (!params.help) { } def display_usage () { + if (params.run_tracking && !params.infant_config) { usage = file("$projectDir/modules/tracking/USAGE") } @@ -179,6 +213,7 @@ def display_usage () { cpu_count = Runtime.runtime.availableProcessors() bindings = ["b0_thr":"$params.b0_thr", + "skip_dwi_preprocessing":"$params.skip_dwi_preprocessing", "initial_bet_f":"$params.initial_bet_f", "final_bet_f":"$params.final_bet_f", "run_bet_anat":"$params.run_bet_anat", @@ -217,6 +252,7 @@ def display_usage () { "roi_radius":"$params.roi_radius", "set_frf":"$params.set_frf", "manual_frf":"$params.manual_frf", + "number_of_tissues":"$params.number_of_tissues", "run_pft_tracking":"$params.run_pft_tracking", "pft_compress_streamlines":"$params.pft_compress_streamlines", "pft_seeding_mask_type":"$params.pft_seeding_mask_type", @@ -280,9 +316,29 @@ def display_usage () { "processes_afd_fixel":"$params.processes_afd_fixel", "processes_connectivity":"$params.processes_connectivity", "references":"$params.references", + "use_freesurfer_atlas":"$params.use_freesurfer_atlas", + "use_brainnetome_atlas":"$params.use_brainnetome_atlas", + "use_glasser_atlas":"$params.use_glasser_atlas", + 
"use_schaefer_100_atlas":"$params.use_schaefer_100_atlas", + "use_schaefer_200_atlas":"$params.use_schaefer_200_atlas", + "use_schaefer_400_atlas":"$params.use_schaefer_400_atlas", + "use_lausanne_1_atlas":"$params.use_lausanne_1_atlas", + "use_lausanne_2_atlas":"$params.use_lausanne_2_atlas", + "use_lausanne_3_atlas":"$params.use_lausanne_3_atlas", + "use_lausanne_4_atlas":"$params.use_lausanne_4_atlas", + "use_lausanne_5_atlas":"$params.use_lausanne_5_atlas", + "use_dilated_labels":"$params.use_dilated_labels", + "nb_threads":"$params.nb_threads", + "atlas_utils_folder":"$params.atlas_utils_folder", + "compute_FS_BN_GL_SF":"$params.compute_FS_BN_GL_SF", + "compute_lausanne_multiscale":"$params.compute_lausanne_multiscale", + "compute_lobes":"$params.compute_lobes", + "run_freesurfer":"$params.run_freesurfer", "run_tracking":"$params.run_tracking", "run_connectomics":"$params.run_connectomics", - "template_config":"$params.template_config" + "template_config":"$params.template_config", + "processes":"$params.processes", + "cpu_count":"$cpu_count" ] engine = new groovy.text.SimpleTemplateEngine() diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE index 12b174f..79ca104 100644 --- a/modules/connectomics/USAGE +++ b/modules/connectomics/USAGE @@ -1,10 +1,18 @@ -DWI Pipeline -======================== +ChildBrainFlow Pipeline +======================= -Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -The connectomics processes are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + 1. 
TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** [1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline @@ -25,9 +33,9 @@ DESCRIPTION | ├-- *dwi.nii.gz | ├-- *.bval | ├-- *.bvec - | |-- *t2w_warped.nii.gz + | |-- *t1.nii.gz [Registered to diff space.] | ├-- *.trk - | ├-- *labels.nii.gz + | ├-- *labels.nii.gz [Native t1 space, optional if -profile freesurfer is used] | ├-- *peaks.nii.gz | ├-- *fodf.nii.gz | ├-- OGenericAffine.mat @@ -38,9 +46,9 @@ DESCRIPTION ├-- *dwi.nii.gz ├-- *bval ├-- *bvec - |-- *t2w_warped.nii.gz + |-- *t1.nii.gz [Registered to diff space.] ├-- *.trk - ├-- *labels.nii.gz + ├-- *labels.nii.gz [Native t1 space, optional if -profile freesurfer is used] ├-- *peaks.nii.gz ├-- *fodf.nii.gz ├-- OGenericAffine.mat @@ -74,6 +82,8 @@ DESCRIPTION --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. --processes_commit Number of processes for COMMIT task ($processes_commit) --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) --processes_connectivity Number of processes for connectivity task ($processes_connectivity) @@ -94,6 +104,8 @@ macos When used, the scratch folder wi tracking When used, will perform the tracking pipeline to generate the whole-brain tractogram from raw diffusion images. +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + connectomics When used, will perform connectivity analysis between atlas-based segmentation. 
@@ -105,7 +117,7 @@ Singularity container is used. The intermediate working directory is, by default, set to './work'. To change it, use the '-w WORK_DIR' argument. -The default config file is tractoflow/nextflow.config. +The default config file is ChildBrainFlow/nextflow.config. Use '-C config_file.config' to specify a non-default configuration file. The '-C config_file.config' must be inserted after the nextflow call like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/connectomics/USAGE_INFANT b/modules/connectomics/USAGE_INFANT index 70e577f..854620d 100644 --- a/modules/connectomics/USAGE_INFANT +++ b/modules/connectomics/USAGE_INFANT @@ -1,10 +1,18 @@ -DWI Pipeline -======================== +ChildBrainFlow Pipeline +======================= -Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -The connectomics processes are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. 
*** [1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline @@ -25,9 +33,9 @@ DESCRIPTION | ├-- *dwi.nii.gz | ├-- *.bval | ├-- *.bvec - | |-- *t2w_warped.nii.gz + | |-- *t2w.nii.gz [Registered to diff space.] | ├-- *.trk - | ├-- *labels.nii.gz + | ├-- *labels.nii.gz [Native t2w space.] | ├-- *peaks.nii.gz | ├-- *fodf.nii.gz | ├-- OGenericAffine.mat @@ -38,9 +46,9 @@ DESCRIPTION ├-- *dwi.nii.gz ├-- *bval ├-- *bvec - |-- *t2w_warped.nii.gz + |-- *t2w.nii.gz [Registered to diff space.] ├-- *.trk - ├-- *labels.nii.gz + ├-- *labels.nii.gz [Native t2w space.] ├-- *peaks.nii.gz ├-- *fodf.nii.gz ├-- OGenericAffine.mat @@ -74,6 +82,8 @@ DESCRIPTION --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. --processes_commit Number of processes for COMMIT task ($processes_commit) --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) --processes_connectivity Number of processes for connectivity task ($processes_connectivity) @@ -94,6 +104,8 @@ macos When used, the scratch folder wi tracking When used, will perform the tracking pipeline to generate the whole-brain tractogram from raw diffusion images. +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + connectomics When used, will perform connectivity analysis between atlas-based segmentation. @@ -105,7 +117,7 @@ Singularity container is used. The intermediate working directory is, by default, set to './work'. To change it, use the '-w WORK_DIR' argument. -The default config file is tractoflow/nextflow.config. +The default config file is ChildBrainFlow/nextflow.config. Use '-C config_file.config' to specify a non-default configuration file. 
The '-C config_file.config' must be inserted after the nextflow call like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/freesurfer/USAGE b/modules/freesurfer/USAGE new file mode 100644 index 0000000..502bed9 --- /dev/null +++ b/modules/freesurfer/USAGE @@ -0,0 +1,108 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. + + +Run FreeSurferFlow Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile freesurfer + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | └-- *t1.nii.gz + └-- S2 + └-- *t1.nii.gz + + --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. + ($use_freesurfer_atlas) + --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. + This is the default setting. 
($use_brainnetome_atlas) + --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. + ($use_glasser_atlas) + --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. + ($use_schaefer_100_atlas) + --use_schaefer_200_atlas If set, will use the Schaefer 200 atlas if -profile connectomics is used. + ($use_schaefer_200_atlas) + --use_schaefer_400_atlas If set, will use the Schaefer 400 atlas if -profile connectomics is used. + ($use_schaefer_400_atlas) + --use_lausanne_1_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_1_atlas) + --use_lausanne_2_atlas If set, will use the lausanne scale 2 atlas if -profile connectomics is + used. ($use_lausanne_2_atlas) + --use_lausanne_3_atlas If set, will use the lausanne scale 3 atlas if -profile connectomics is + used. ($use_lausanne_3_atlas) + --use_lausanne_4_atlas If set, will use the lausanne scale 4 atlas if -profile connectomics is + used. ($use_lausanne_4_atlas) + --use_lausanne_5_atlas If set, will use the lausanne scale 5 atlas if -profile connectomics is + used. ($use_lausanne_5_atlas) + --use_dilated_labels If set, will use the dilated version of the atlas selected above. + ($use_dilated_labels) + + +OPTIONAL ARGUMENTS (current value) + +[FREESURFERFLOW OPTIONS] + + --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases + ($atlas_utils_folder) + --nb_threads Number of threads used by recon-all and the atlases creation + ($nb_threads) + --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) + * FreeSurfer (adapted) + * Brainnetome + * Glasser + * Schaefer (100/200/400) + --compute_lausanne_multiscale Compute the connectivity multiscale atlases from Lausanne + ($compute_lausanne_multiscale) + --compute_lobes Compute the lobes atlas. ($compute_lobes) + --processes The number of parallel processes to launch ($cpu_count). 
+ Only affects the local scheduler. + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf new file mode 100644 index 0000000..075280c --- /dev/null +++ b/modules/freesurfer/processes/atlases.nf @@ -0,0 +1,114 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process FS_BN_GL_SF { + cpus params.nb_threads + + input: + tuple val(sid), path(folder) + + output: + tuple val(sid), path("*brainnetome*.nii.gz"), emit: brainnetome + tuple val(sid), path("*freesurfer*.nii.gz"), emit: freesurfer + tuple val(sid), path("*glasser*.nii.gz"), emit: glasser + tuple val(sid), path("*schaefer_100*.nii.gz"), emit: schaefer_100 + tuple val(sid), path("*schaefer_200*.nii.gz"), emit: schaefer_200 + tuple val(sid), path("*schaefer_400*.nii.gz"), emit: schaefer_400 + path("*{brainnetome,freesurfer,glasser,schaefer}*.txt") + path("*{brainnetome,freesurfer,glasser,schaefer}*.json") + + when: + params.compute_FS_BN_GL_SF + + script: + """ + ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ + bash $params.atlas_utils_folder/freesurfer_utils/generate_atlas_FS_BN_GL_SF_v5.sh \$(dirname ${folder}) \ + ${sid} ${params.nb_threads} FS_BN_GL_SF_Atlas/ + cp $sid/FS_BN_GL_SF_Atlas/* ./ + """ +} + +process LOBES { + cpus params.nb_threads + + input: + tuple val(sid), path(folder) + + output: + path("*lobes*.nii.gz"), emit: lobes + path("*lobes*.txt") + path("*lobes*.json") + + when: + params.compute_lobes + + script: + """ + mri_convert ${folder}/mri/rawavg.mgz rawavg.nii.gz + + mri_convert ${folder}/mri/wmparc.mgz wmparc.nii.gz + scil_reshape_to_reference.py wmparc.nii.gz rawavg.nii.gz wmparc.nii.gz --interpolation nearest -f + scil_image_math.py convert wmparc.nii.gz wmparc.nii.gz --data_type uint16 -f + + mri_convert ${folder}/mri/brainmask.mgz brain_mask.nii.gz + scil_image_math.py lower_threshold brain_mask.nii.gz 0.001 brain_mask.nii.gz --data_type uint8 -f + scil_image_math.py dilation brain_mask.nii.gz 1 brain_mask.nii.gz -f + scil_reshape_to_reference.py brain_mask.nii.gz rawavg.nii.gz 
brain_mask.nii.gz --interpolation nearest -f + scil_image_math.py convert brain_mask.nii.gz brain_mask.nii.gz --data_type uint8 -f + + scil_combine_labels.py atlas_lobes_v5.nii.gz -v wmparc.nii.gz 1003 1012 1014 1017 1018 1019 1020 1024 1027 1028 \ + 1032 -v wmparc.nii.gz 1008 1022 1025 1029 1031 -v wmparc.nii.gz 1005 1011 1013 1021 -v wmparc.nii.gz 1001 \ + 1006 1007 1009 1015 1015 1030 1033 -v wmparc.nii.gz 1002 1010 1023 1026 -v wmparc.nii.gz 8 -v wmparc.nii.gz \ + 10 11 12 13 17 18 26 28 -v wmparc.nii.gz 2003 2012 2014 2017 2018 2019 2020 2024 2027 2028 2032 \ + -v wmparc.nii.gz 2008 2022 2025 2029 2031 -v wmparc.nii.gz 2005 2011 2013 2021 -v wmparc.nii.gz 2001 2006 \ + 2007 2009 2015 2015 2030 2033 -v wmparc.nii.gz 2002 2010 2023 2026 -v wmparc.nii.gz 49 50 51 52 53 54 58 60 \ + -v wmparc.nii.gz 47 -v wmparc.nii.gz 16 --merge + scil_dilate_labels.py atlas_lobes_v5.nii.gz atlas_lobes_v5_dilate.nii.gz --distance 2 \ + --label_to_dilate 1 2 3 4 5 6 8 9 10 11 12 14 15 --mask brain_mask.nii.gz + cp $params.atlas_utils_folder/freesurfer_utils/*lobes_v5* ./ + """ +} + +process LAUSANNE { + cpus 1 + + input: + tuple val(sid), path(folder) + each scale + + output: + tuple val(sid), path("lausanne_2008_scale_1.nii.gz"), path("lausanne_2008_scale_1_dilate.nii.gz"), emit: lausanne_1, optional: true + tuple val(sid), path("lausanne_2008_scale_2.nii.gz"), path("lausanne_2008_scale_2_dilate.nii.gz"), emit: lausanne_2, optional: true + tuple val(sid), path("lausanne_2008_scale_3.nii.gz"), path("lausanne_2008_scale_3_dilate.nii.gz"), emit: lausanne_3, optional: true + tuple val(sid), path("lausanne_2008_scale_4.nii.gz"), path("lausanne_2008_scale_4_dilate.nii.gz"), emit: lausanne_4, optional: true + tuple val(sid), path("lausanne_2008_scale_5.nii.gz"), path("lausanne_2008_scale_5_dilate.nii.gz"), emit: lausanne_5, optional: true + path("*.txt") + path("*.json") + + when: + params.compute_lausanne_multiscale + + script: + """ + ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ + freesurfer_home=\$(dirname \$(dirname \$(which mri_label2vol))) + python3.7 $params.atlas_utils_folder/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ + \$(dirname ${folder}) ${sid} \$freesurfer_home --scale ${scale} --dilation_factor 0 --log_level DEBUG + + mri_convert 
${folder}/mri/rawavg.mgz rawavg.nii.gz + scil_image_math.py lower_threshold rawavg.nii.gz 0.001 mask.nii.gz --data_type uint8 + scil_reshape_to_reference.py ${folder}/mri/lausanne2008.scale${scale}+aseg.nii.gz mask.nii.gz \ + lausanne_2008_scale_${scale}.nii.gz --interpolation nearest + scil_image_math.py convert lausanne_2008_scale_${scale}.nii.gz lausanne_2008_scale_${scale}.nii.gz \ + --data_type int16 -f + scil_dilate_labels.py lausanne_2008_scale_${scale}.nii.gz lausanne_2008_scale_${scale}_dilate.nii.gz \ + --distance 2 --mask mask.nii.gz + + cp $params.atlas_utils_folder/lausanne_multi_scale_atlas/*.txt ./ + cp $params.atlas_utils_folder/lausanne_multi_scale_atlas/*.json ./ + """ +} + + diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf new file mode 100644 index 0000000..e11e987 --- /dev/null +++ b/modules/freesurfer/processes/freesurfer.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process FREESURFER { + cpus params.nb_threads + + input: + tuple val(sid), path(anat) + output: + tuple val(sid), "$sid/", emit: folders + + script: + """ + export SUBJECTS_DIR=. 
+ recon-all -i $anat -s $sid -all -parallel -openmp $params.nb_threads + """ +} \ No newline at end of file diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf new file mode 100644 index 0000000..777df93 --- /dev/null +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -0,0 +1,139 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +include { + FREESURFER +} from '../processes/freesurfer.nf' +include { + FS_BN_GL_SF; + LOBES; + LAUSANNE +} from '../processes/atlases.nf' + +workflow FREESURFERFLOW { + take: + anat + + main: + + // ** Launching FreeSurfer Recon-all ** // + FREESURFER(anat) + + // ** Computing FS_BN_GL_SF atlases ** // + FS_BN_GL_SF(FREESURFER.out.folders) + + // ** Computing lobes atlases ** // + LOBES(FREESURFER.out.folders) + + // ** Computing lausanne atlas ** // + scales = Channel.from(1,2,3,4,5) + + // ** LAUSANNE runs once per scale via its 'each' input. ** // + LAUSANNE(FREESURFER.out.folders, scales) + + // ** Work out a way for the user to select which atlas to use. ** // + // ** Could be cleaner than a bunch of if statements in the future. 
** // + if ( params.use_freesurfer_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.freesurfer + .map{ [it[0], it[2]] } + } else { + labels = FS_BN_GL_SF.out.freesurfer + .map{ [it[0], it[1]] } + } + } + if ( params.use_brainnetome_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.brainnetome + .map{ [it[0], it[2]] } + } else { + labels = FS_BN_GL_SF.out.brainnetome + .map{ [it[0], it[1]] } + } + } + if ( params.use_glasser_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.glasser + .map{ [it[0], it[2]] } + } else { + labels = FS_BN_GL_SF.out.glasser + .map{ [it[0], it[1]] } + } + } + if ( params.use_schaefer_100_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.schaefer_100 + .map{ [it[0], it[2]] } + } else { + labels = FS_BN_GL_SF.out.schaefer_100 + .map{ [it[0], it[1]] } + } + } + if ( params.use_schaefer_200_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.schaefer_200 + .map{ [it[0], it[2]] } + } else { + labels = FS_BN_GL_SF.out.schaefer_200 + .map{ [it[0], it[1]] } + } + } + if ( params.use_schaefer_400_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.schaefer_400 + .map{ [it[0], it[2]] } + } else { + labels = FS_BN_GL_SF.out.schaefer_400 + .map{ [it[0], it[1]] } + } + } + if ( params.use_lausanne_1_atlas ) { + if ( params.use_dilated_labels ) { + labels = LAUSANNE.out.lausanne_1 + .map{ [it[0], it[2]] } + } else { + labels = LAUSANNE.out.lausanne_1 + .map{ [it[0], it[1]] } + } + } + if ( params.use_lausanne_2_atlas ) { + if ( params.use_dilated_labels ) { + labels = LAUSANNE.out.lausanne_2 + .map{ [it[0], it[2]] } + } else { + labels = LAUSANNE.out.lausanne_2 + .map{ [it[0], it[1]] } + } + } + if ( params.use_lausanne_3_atlas ) { + if ( params.use_dilated_labels ) { + labels = LAUSANNE.out.lausanne_3 + .map{ [it[0], it[2]] } + } else { + labels = LAUSANNE.out.lausanne_3 + .map{ [it[0], it[1]] } + } + } + if ( 
params.use_lausanne_4_atlas ) { + if ( params.use_dilated_labels ) { + labels = LAUSANNE.out.lausanne_4 + .map{ [it[0], it[2]] } + } else { + labels = LAUSANNE.out.lausanne_4 + .map{ [it[0], it[1]] } + } + } + if ( params.use_lausanne_5_atlas ) { + if ( params.use_dilated_labels ) { + labels = LAUSANNE.out.lausanne_5 + .map{ [it[0], it[2]] } + } else { + labels = LAUSANNE.out.lausanne_5 + .map{ [it[0], it[1]] } + } + } + + emit: + labels +} \ No newline at end of file diff --git a/modules/io.nf b/modules/io.nf index e5a256c..9099ce0 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -11,9 +11,74 @@ def fetch_id ( dir, dir_base ) { .join("_") } +// ** Getting data for the -profile freesurfer ** // +workflow get_data_freesurfer { + main: + if (! params.input ) { + log.info "You must provide an input folder containing all images required for FreesurferFlow :" + log.info " --input=/path/to/[input_folder] Input folder containing your subjects." + log.info " [input]" + log.info " ├-- S1" + log.info " | └-- *t1.nii.gz" + log.info " └-- S2" + log.info " └-- *t1.nii.gz" + error "Please resubmit your command with the previous file structure." + } + + input = file(params.input) + + // ** Loading files ** // + anat_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + + emit: + anat = anat_channel +} + // ** Decided to split the data fetching steps for different profiles in different functions ** // // ** for easier code-reading. 
** // workflow get_data_tracking { + main: + if ( !params.input ) { + log.info "You must provide an input folder containing all images using:" + log.info " --input=/path/to/[input_folder] Input folder containing multiple subjects for tracking" + log.info "" + log.info " [Input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *revb0.nii.gz" + log.info " | └-- *t1.nii.gz" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *bval" + log.info " ├-- *bvec" + log.info " ├-- *revb0.nii.gz" + log.info " └-- *t1.nii.gz" + error "Please resubmit your command with the previous file structure." + } + + input = file(params.input) + + // ** Loading all files. ** // + dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true) + { fetch_id(it.parent, input) } + rev_channel = Channel.fromFilePairs("$input/**/*revb0.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + anat_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + + // ** Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. ** // + dwi_channel = dwi_channel.map{sid, bvals, bvecs, dwi -> tuple(sid, dwi, bvals, bvecs)} + + emit: + dwi = dwi_channel + rev = rev_channel + anat = anat_channel +} + +// ** Getting data for the -profile tracking,infant ** // +workflow get_data_tracking_infant { main: if ( !params.input ) { log.info "You must provide an input folder containing all images using:" @@ -39,7 +104,7 @@ workflow get_data_tracking { input = file(params.input) - // Loading all files. + // ** Loading all files. 
** // dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true) { fetch_id(it.parent, input) } rev_channel = Channel.fromFilePairs("$input/**/*revb0.nii.gz", size: 1, flat: true) @@ -49,7 +114,7 @@ workflow get_data_tracking { wm_mask_channel = Channel.fromFilePairs("$input/**/*wm_mask.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. + // ** Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. ** // dwi_channel = dwi_channel.map{sid, bvals, bvecs, dwi -> tuple(sid, dwi, bvals, bvecs)} emit: @@ -59,7 +124,76 @@ workflow get_data_tracking { wm_mask = wm_mask_channel } +// ** Fetching data for -profile connectomics ** // workflow get_data_connectomics { + main: + if ( !params.input ) { + log.info "You must provide an input folder containing all images using:" + log.info " --input=/path/to/[input_folder] Input folder containing multiple subjects" + log.info "" + log.info " [Input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *t1.nii.gz" + log.info " | ├-- *.trk" + log.info " | ├-- *labels.nii.gz" + log.info " | ├-- *peaks.nii.gz" + log.info " | ├-- *fodf.nii.gz" + log.info " | ├-- OGenericAffine.mat" + log.info " | ├-- output1Warp.nii.gz" + log.info " | └-- metrics" + log.info " | └-- METRIC_NAME.nii.gz [Optional]" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *bval" + log.info " ├-- *bvec" + log.info " ├-- *t1.nii.gz" + log.info " ├-- *.trk" + log.info " ├-- *labels.nii.gz" + log.info " ├-- *peaks.nii.gz" + log.info " ├-- *fodf.nii.gz" + log.info " ├-- OGenericAffine.mat" + log.info " ├-- output1Warp.nii.gz" + log.info " └-- metrics" + log.info " └-- METRIC_NAME.nii.gz [Optional]" + error "Please resubmit your command with the previous file structure." 
+ } + + input = file(params.input) + + // Loading all files. + tracking_channel = Channel.fromFilePairs("$input/**/*.trk", size: 1, flat: true) + { fetch_id(it.parent, input) } + labels_channel = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + dwi_peaks_channel = Channel.fromFilePairs("$input/**/{*dwi.nii.gz,*.bval,*.bvec,*peaks.nii.gz}", size: 4, flat: true) + { fetch_id(it.parent, input) } + fodf_channel = Channel.fromFilePairs("$input/**/*fodf.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + metrics_channel = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, maxDepth: 2) + { it.parent.parent.name } + t2w_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,output1Warp.nii.gz}", size: 2, flat: true) + { fetch_id(it.parent, input) } + + // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. + dwi_peaks_channel = dwi_peaks_channel.map{sid, bvals, bvecs, dwi, peaks -> tuple(sid, dwi, bvals, bvecs, peaks)} + + emit: + trk = tracking_channel + labels = labels_channel + dwi_peaks = dwi_peaks_channel + fodf = fodf_channel + metrics = metrics_channel + t2w = t2w_channel + transfos = transfos_channel +} + +// ** Fetching data for -profile connectomics,infant ** // +workflow get_data_connectomics_infant { main: if ( !params.input ) { log.info "You must provide an input folder containing all images using:" diff --git a/modules/tracking/USAGE b/modules/tracking/USAGE index e6c6a0d..09bfbea 100644 --- a/modules/tracking/USAGE +++ b/modules/tracking/USAGE @@ -1,20 +1,24 @@ -DWI Pipeline -======================== +ChildBrainFlow Pipeline +======================= -Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. 
+ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -It is possible to also run a connectivity analysis following tracking. Using -profile connectomics, -the pipeline will perform connectivity analysis based on atlas segmentation. The connectomics processes -are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) -Both analysis (tracking and connectomics) can be performed one after another automatically (using --profile tracking,connectomics). The pipeline will then reorganised channel to provide the correct inputs. +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** [1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., -TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline -leveraging Nextflow & Singularity, NeuroImage, -https://doi.org/10.1016/j.neuroimage.2020.116889. + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. + Run Tracking Pipeline @@ -31,7 +35,7 @@ DESCRIPTION | ├-- *.bvec | ├-- *revb0.nii.gz | ├-- *t1w.nii.gz - | ├-- *labels.nii.gz [Only if connectomics is also selected] + | ├-- *labels.nii.gz [Required if -profile connectomics is selected, not required when -profile freesurfer is used.] 
| └-- metrics | └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also | selected] @@ -41,7 +45,7 @@ DESCRIPTION ├-- *bvec ├-- *revb0.nii.gz ├-- *t1w.nii.gz - ├-- *labels.nii.gz [Only if connectomics is also selected] + ├-- *labels.nii.gz [Required if -profile connectomics is selected, not required when -profile freesurfer is used.] └-- metrics └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also selected] @@ -53,19 +57,19 @@ OPTIONAL ARGUMENTS (current value) --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) BET DWI OPTIONS --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) - BET ANAT OPTIONS - --run_bet_anat If set, will perform brain extraction on the input anat volume. - ($run_bet_anat) - Default settings are soft to make sure an already brain extracted volume - is not impacted - by the bet command. The goal is to clean volumes that still have - portions of non-brain structures. - --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) + BET T1 OPTIONS + --template_t1 Path to the template T1 directory for antsBrainExtraction. + The folder must contain t1_template.nii.gz and + t1_brain_probability_map.nii.gz. The default path is the human_data + folder in the singularity container ($template_t1). EDDY AND TOPUP OPTIONS --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) @@ -183,6 +187,8 @@ OPTIONAL ARGUMENTS (current value) (--tracking_seed 1,2,3) PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. 
--processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) --processes_eddy Number of processes for EDDY task. ($processes_eddy) --processes_registration Number of processes for registration task. ($processes_registration) @@ -192,7 +198,7 @@ OPTIONAL ARGUMENTS (current value) OUTPUT OPTIONS --output_dir Directory to write the final results. Default is - "./Results_Infant_Tracking/". + "./Results_ChildBrainFlow/". AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) @@ -204,6 +210,8 @@ macos When used, the scratch folder wi tracking When used, will perform the tracking pipeline to generate the whole-brain tractogram from raw diffusion images. +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + connectomics When used, will perform connectivity analysis between atlas-based segmentation. @@ -215,7 +223,7 @@ Singularity container is used. The intermediate working directory is, by default, set to './work'. To change it, use the '-w WORK_DIR' argument. -The default config file is tractoflow/nextflow.config. +The default config file is ChildBrainFlow/nextflow.config. Use '-C config_file.config' to specify a non-default configuration file. The '-C config_file.config' must be inserted after the nextflow call like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/tracking/USAGE_INFANT b/modules/tracking/USAGE_INFANT index 2472144..d7c0bf4 100644 --- a/modules/tracking/USAGE_INFANT +++ b/modules/tracking/USAGE_INFANT @@ -1,20 +1,24 @@ -DWI Pipeline -======================== +ChildBrainFlow Pipeline +======================= -Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. 
+It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -It is possible to also run a connectivity analysis following tracking. Using -profile connectomics, -the pipeline will perform connectivity analysis based on atlas segmentation. The connectomics processes -are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) -Both analysis (tracking and connectomics) can be performed one after another automatically (using --profile tracking,connectomics). The pipeline will then reorganised channel to provide the correct inputs. +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** [1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., -TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline -leveraging Nextflow & Singularity, NeuroImage, -https://doi.org/10.1016/j.neuroimage.2020.116889. + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. + Run Tracking Pipeline Infant Config @@ -55,6 +59,9 @@ OPTIONAL ARGUMENTS (current value) --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. 
Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) BET DWI OPTIONS --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) @@ -185,6 +192,8 @@ OPTIONAL ARGUMENTS (current value) (--tracking_seed 1,2,3) PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) --processes_eddy Number of processes for EDDY task. ($processes_eddy) --processes_registration Number of processes for registration task. ($processes_registration) @@ -206,6 +215,8 @@ macos When used, the scratch folder wi tracking When used, will perform the tracking pipeline to generate the whole-brain tractogram from raw diffusion images. +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + connectomics When used, will perform connectivity analysis between atlas-based segmentation. @@ -217,7 +228,7 @@ Singularity container is used. The intermediate working directory is, by default, set to './work'. To change it, use the '-w WORK_DIR' argument. -The default config file is tractoflow/nextflow.config. +The default config file is ChildBrainFlow/nextflow.config. Use '-C config_file.config' to specify a non-default configuration file. The '-C config_file.config' must be inserted after the nextflow call like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 1473fb0..bb5c194 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -10,6 +10,9 @@ process BET_DWI { tuple val(sid), path(dwi), path(bval), path(bvec) output: tuple val(sid), path("${sid}__dwi_bet.nii.gz"), emit: bet_dwi + when: + !params.skip_dwi_preprocessing + script: // ** Using a combination of preliminary bet, powder average computation and then final bet. ** // // ** This might not be necessary for good quality data, but returns much more robust results on ** // @@ -62,6 +65,9 @@ process DENOISING { tuple val(sid), path(dwi) output: tuple val(sid), path("${sid}__dwi_denoised.nii.gz"), emit: denoised_dwi + when: + !params.skip_dwi_preprocessing + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -81,6 +87,9 @@ process TOPUP { output: tuple val(sid), path("${sid}__corrected_b0s.nii.gz"), path("${params.topup_prefix}_fieldcoef.nii.gz"), path("${params.topup_prefix}_movpar.txt"), emit: topup_result + when: + !params.skip_dwi_preprocessing + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -114,6 +123,9 @@ process EDDY_TOPUP { tuple val(sid), path("${sid}__dwi_corrected.nii.gz"), path("${sid}__bval_eddy"), path("${sid}__dwi_eddy_corrected.bvec"), emit: dwi_bval_bvec tuple val(sid), path("${sid}__b0_bet_mask.nii.gz"), emit: b0_mask + when: + !params.skip_dwi_preprocessing + script: slice_drop_flag="" if (params.use_slice_drop_correction) @@ -147,6 +159,9 @@ process N4 { tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) output: tuple val(sid), path("${sid}__dwi_n4.nii.gz"), emit: dwi_n4 + when: + !params.skip_dwi_preprocessing + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus @@ -311,6 +326,7 @@ process RESAMPLE_ANAT { tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__mask_resampled.nii.gz"), emit: t2w_and_mask 
when: params.infant_config + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -334,6 +350,9 @@ process NORMALIZE { tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) output: tuple val(sid), path("${sid}__dwi_normalized.nii.gz"), emit: dwi_normalized + when: + !params.skip_dwi_preprocessing + script: if (params.dti_shells) """ @@ -410,4 +429,24 @@ process EXTRACT_B0 { mrthreshold ${sid}__b0_resampled.nii.gz ${sid}__b0_mask_resampled.nii.gz\ --abs 0.00001 -nthreads 1 """ +} + +process DWI_MASK { + cpus 1 + + input: + tuple val(sid), path(dwi), path(bval), path(bvec) + output: + tuple val(sid), path("${sid}__b0_mask.nii.gz"), emit: dwi_mask + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_extract_b0.py $dwi $bval $bvec b0.nii.gz --mean\ + --b0_thr $params.b0_thr --force_b0_threshold + mrthreshold b0.nii.gz ${sid}__b0_mask.nii.gz\ + --abs 0.00001 -nthreads 1 + """ } \ No newline at end of file diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 1f28903..7003f9c 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -18,7 +18,8 @@ include { RESAMPLE_T1; NORMALIZE; RESAMPLE_DWI; - EXTRACT_B0 + EXTRACT_B0; + DWI_MASK } from '../processes/preprocess.nf' workflow DWI { @@ -57,9 +58,16 @@ workflow DWI { N4(n4_channel) // ** Crop ** // + if ( params.skip_dwi_preprocessing ) { + DWI_MASK(dwi_channel) + crop_channel = dwi_channel.map{ [it[0], it[1]] } + .combine(DWI_MASK.out.dwi_mask, by: 0) + CROP_DWI(crop_channel) + } else { dwi_crop_channel = N4.out .combine(EDDY_TOPUP.out.b0_mask, by: 0) CROP_DWI(dwi_crop_channel) + } // ** Normalization ** // normalize_channel = CROP_DWI.out.dwi @@ -68,16 +76,28 @@ workflow DWI { NORMALIZE(normalize_channel) // ** Resampling ** // + if ( params.skip_dwi_preprocessing ) { + resample_channel = CROP_DWI.out.dwi + 
.combine(CROP_DWI.out.mask, by: 0) + RESAMPLE_DWI(resample_channel) + } else { resample_dwi_channel = NORMALIZE.out.dwi_normalized .combine(CROP_DWI.out.mask, by: 0) RESAMPLE_DWI(resample_dwi_channel) + } // ** Extracting b0 ** // + if ( params.skip_dwi_preprocessing ) { + extract_b0_channel = RESAMPLE_DWI.out.dwi_resampled + .combine(dwi_channel.map{ [it[0], it[2], it[3]] }, by: 0) + EXTRACT_B0(extract_b0_channel) + } else { extract_b0_channel = EDDY_TOPUP.out.dwi_bval_bvec .map{[it[0], it[2], it[3]]} .combine(RESAMPLE_DWI.out.dwi_resampled, by: 0) .map{ sid, bval, bvec, dwi -> tuple(sid, dwi, bval, bvec)} EXTRACT_B0(extract_b0_channel) + } emit: dwi_bval_bvec = extract_b0_channel @@ -96,15 +116,21 @@ workflow ANAT { N4_T1(DENOISE_T1.out.t1_denoised) // ** Resampling ** // - RESAMPLE_T1(N4_T1.out.t1_n4) - // ** Resample if -profile infant ** // - RESAMPLE_ANAT(anat_channel) + if ( params.infant_config ) { + // ** Resample if -profile infant ** // + RESAMPLE_ANAT(anat_channel) + } else { + RESAMPLE_T1(N4_T1.out.t1_n4) + } // ** Bet ** // - BET_T1(RESAMPLE_T1.out.t1_resampled) - // ** Bet if -profile infant ** // - BET_T2(RESAMPLE_ANAT.out.t2w_and_mask.map{ [it[0], it[1]] }) - + if ( params.infant_config ) { + // ** Bet if -profile infant ** // + BET_T2(RESAMPLE_ANAT.out.t2w_and_mask.map{ [it[0], it[1]] }) + } else { + BET_T1(RESAMPLE_T1.out.t1_resampled) + } + // ** Crop ** // if ( params.infant_config ) { crop_channel = BET_T2.out.t2_bet diff --git a/nextflow.config b/nextflow.config index 38cb395..16d7472 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,5 +1,5 @@ process { - publishDir = {"./Results_Infant_Tracking/$sid/$task.process"} + publishDir = {"./Results_ChildBrainFlow/$sid/$task.process"} scratch = true errorStrategy = { task.attempt <= 3 ? 
'retry' : 'ignore' } maxRetries = 3 @@ -20,6 +20,7 @@ params { //** Global Options **// b0_thr = 10 dwi_shell_tolerance = 20 + skip_dwi_preprocessing = false template_t1 = "/human-data/mni_152_sym_09c/t1" //** BET DWI Options **// @@ -78,6 +79,9 @@ params { set_frf = true manual_frf = "15,4,4" + // ** Segment Tissues Options ** // + number_of_tissues = 3 + //** PFT Seeding and Tracking Options **// run_pft_tracking = true pft_compress_streamlines = true @@ -162,6 +166,7 @@ params { output_dir = false // Profiles Options + run_freesurfer = false run_tracking = false run_connectomics = false infant_config = false @@ -170,8 +175,27 @@ params { // Template Options // references = "./references/" - Mean_FRF_Publish_Dir = "./Results_Infant_Tracking/Mean_FRF" - Pop_Avg_Publish_Dir = "./Results_Infant_Tracking/Pop_Avg" + Mean_FRF_Publish_Dir = "./Results_ChildBrainFlow/Mean_FRF" + Pop_Avg_Publish_Dir = "./Results_ChildBrainFlow/Pop_Avg" + + // ** FreeSurfer Options ** // + use_freesurfer_atlas = false + use_brainnetome_atlas = true + use_glasser_atlas = false + use_schaefer_100_atlas = false + use_schaefer_200_atlas = false + use_schaefer_400_atlas = false + use_lausanne_1_atlas = false + use_lausanne_2_atlas = false + use_lausanne_3_atlas = false + use_lausanne_4_atlas = false + use_lausanne_5_atlas = false + use_dilated_labels = false + nb_threads = 4 + atlas_utils_folder = "/FS_BN_GL_SF_utils/" + compute_FS_BN_GL_SF = true + compute_lausanne_multiscale = true + compute_lobes = false } if(params.output_dir) { @@ -198,7 +222,7 @@ singularity.autoMounts = true profiles { no_symlink { process{ - publishDir = [path: {"./Results_Infant_Tracking/$sid/$task.process"}, mode: 'copy'] + publishDir = [path: {"./Results_ChildBrainFlow/$sid/$task.process"}, mode: 'copy'] } } @@ -206,6 +230,10 @@ profiles { process.scratch="/tmp" } + freesurfer { + params.run_freesurfer = true + } + tracking { params.run_tracking = true } From 5c34aa5255ae76a3d6ba5e8d563490c1b333b4a1 Mon Sep 17 
00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 26 Oct 2023 18:20:03 -0400 Subject: [PATCH 17/54] fix freesurfer, add docker and singularity files --- .github/workflows/ci.yml | 15 ++-- containers/Dockerfile | 40 +++++++++++ containers/license.txt | 4 ++ containers/singularity_recipe.def | 68 +++++++++++++++++++ main.nf | 4 +- modules/freesurfer/processes/atlases.nf | 14 ++-- modules/freesurfer/processes/freesurfer.nf | 2 +- .../freesurfer/workflows/freesurferflow.nf | 5 +- modules/io.nf | 3 +- .../tracking/processes/tracking_processes.nf | 11 +-- nextflow.config | 2 +- 11 files changed, 144 insertions(+), 24 deletions(-) create mode 100644 containers/Dockerfile create mode 100644 containers/license.txt create mode 100644 containers/singularity_recipe.def diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cf9cee6..9605596 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,10 +6,17 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - uses: eWaterCycle/setup-apptainer@v2.0.0 - uses: nf-core/setup-nextflow@v1 - - name: Pull docker image + - name: Build Docker Image. run: | - docker pull scilus/scilus:latest - - name: Run pipeline + docker build -t childbrainflow_image ${GITHUB_WORKSPACE}/containers/. + - name: Build Singularity Image. 
run: | - nextflow run ${GITHUB_WORKSPACE} --help -with-docker scilus/docker-tractoflow:latest \ No newline at end of file + singularity build childbrainflow_image.sif ${GITHUB_WORKSPACE}/containers/singularity_recipe.def + - name: Run pipeline with Docker + run: | + nextflow run ${GITHUB_WORKSPACE} --help -with-docker childbrainflow_image + - name: Run pipeline with Singularity + run: | + nextflow run ${GITHUB_WORKSPACE} --help -with-singularity childbrainflow_image.sif \ No newline at end of file diff --git a/containers/Dockerfile b/containers/Dockerfile new file mode 100644 index 0000000..57a5a5f --- /dev/null +++ b/containers/Dockerfile @@ -0,0 +1,40 @@ +FROM scilus/scilus:1.5.0 + +LABEL version="ChildBrainFlow-1.0.0" + +# Installing freesurfer on top of scilus:1.5.0 +WORKDIR /root +RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer-linux-centos7_x86_64-7.2.0.tar.gz -O fs.tar.gz && \ + tar --no-same-owner -xzvf fs.tar.gz && \ + mv freesurfer /usr/local && \ + rm fs.tar.gz +RUN apt-get update && \ + apt-get install csh tcsh + +# Setup freesurfer env +ENV OS Linux +ENV PATH /usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +ENV FREESURFER_HOME /usr/local/freesurfer +ENV FREESURFER /usr/local/freesurfer +ENV SUBJECTS_DIR /usr/local/freesurfer/subjects +ENV LOCAL_DIR /usr/local/freesurfer/local +ENV FSFAST_HOME /usr/local/freesurfer/fsfast +ENV FMRI_ANALYSIS_DIR /usr/local/freesurfer/fsfast +ENV FUNCTIONALS_DIR /usr/local/freesurfer/sessions + +# set default fs options +ENV FS_OVERRIDE 0 +ENV FIX_VERTEX_AREA "" +ENV FSF_OUTPUT_FORMAT nii.gz + +# mni env requirements +ENV MINC_BIN_DIR /usr/local/freesurfer/mni/bin +ENV MINC_LIB_DIR /usr/local/freesurfer/mni/lib +ENV MNI_DIR /usr/local/freesurfer/mni +ENV MNI_DATAPATH /usr/local/freesurfer/mni/data +ENV MNI_PERL5LIB /usr/local/freesurfer/mni/share/perl5 +ENV 
PERL5LIB /usr/local/freesurfer/mni/share/perl5 + +ADD https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 /freesurfer/license.txt +ADD https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 /root/ +RUN rm FS_BN_GL_SF_utils.tar \ No newline at end of file diff --git a/containers/license.txt b/containers/license.txt new file mode 100644 index 0000000..b05980a --- /dev/null +++ b/containers/license.txt @@ -0,0 +1,4 @@ +anthony.gagnon7@usherbrooke.ca +50632 + *C/gExRm8XAxE + FSe2IOWbYUV/E diff --git a/containers/singularity_recipe.def b/containers/singularity_recipe.def new file mode 100644 index 0000000..6e32e38 --- /dev/null +++ b/containers/singularity_recipe.def @@ -0,0 +1,68 @@ +Bootstrap: docker +From: scilus/scilus:1.5.0 + +%labels + version ChildBrainFlow-1.0.0 + +%post + # Installing FreeSurfer on top of scilus:1.5.0 + cd /root + wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer-linux-centos7_x86_64-7.2.0.tar.gz -O fs.tar.gz + tar --no-same-owner -xzvf fs.tar.gz + mv freesurfer /usr/local + rm fs.tar.gz + + # Install required tcsh and csh. 
+ apt-get update + apt-get install -y csh tcsh + + # Setup FreeSurfer environment + export OS=Linux + export PATH=/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export FREESURFER_HOME=/usr/local/freesurfer + export FREESURFER=/usr/local/freesurfer + export SUBJECTS_DIR=/usr/local/freesurfer/subjects + export LOCAL_DIR=/usr/local/freesurfer/local + export FSFAST_HOME=/usr/local/freesurfer/fsfast + export FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast + export FUNCTIONALS_DIR=/usr/local/freesurfer/sessions + + # Set default FreeSurfer options + export FS_OVERRIDE=0 + export FIX_VERTEX_AREA="" + export FSF_OUTPUT_FORMAT=nii.gz + + # Set MNI environment requirements + export MINC_BIN_DIR=/usr/local/freesurfer/mni/bin + export MINC_LIB_DIR=/usr/local/freesurfer/mni/lib + export MNI_DIR=/usr/local/freesurfer/mni + export MNI_DATAPATH=/usr/local/freesurfer/mni/data + export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 + export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 + + + # Download additional files + wget -O /freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 + wget -O /root/FS_BN_GL_SF_utils.tar https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 + tar -xzvf FS_BN_GL_SF_utils.tar + rm FS_BN_GL_SF_utils.tar + +%environment + export OS=Linux + export PATH=/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export FREESURFER_HOME=/usr/local/freesurfer + export FREESURFER=/usr/local/freesurfer + export SUBJECTS_DIR=/usr/local/freesurfer/subjects + export LOCAL_DIR=/usr/local/freesurfer/local + export FSFAST_HOME=/usr/local/freesurfer/fsfast + export FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast + 
export FUNCTIONALS_DIR=/usr/local/freesurfer/sessions + export FS_OVERRIDE=0 + export FIX_VERTEX_AREA="" + export FSF_OUTPUT_FORMAT=nii.gz + export MINC_BIN_DIR=/usr/local/freesurfer/mni/bin + export MINC_LIB_DIR=/usr/local/freesurfer/mni/lib + export MNI_DIR=/usr/local/freesurfer/mni + export MNI_DATAPATH=/usr/local/freesurfer/mni/data + export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 + export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 diff --git a/main.nf b/main.nf index bc23248..383f5ed 100644 --- a/main.nf +++ b/main.nf @@ -43,7 +43,7 @@ workflow { data.fa_ref) } - if ( params.freesurfer ) { + if ( params.run_freesurfer ) { data = get_data_freesurfer() FREESURFERFLOW(data.anat) @@ -110,8 +110,8 @@ workflow { // ** Fetching labels from freesurferflow if -profile freesurfer is used, if not, ** // // ** fetching it from input files. ** // + input = file(params.input) if ( !params.run_freesurfer ) { - input = file(params.input) labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } } else { diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index 075280c..17cc5e0 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -75,14 +75,14 @@ process LAUSANNE { input: tuple val(sid), path(folder) - each path(scale) + each scale output: - tuple val(sid), path("[lausanne_2008_scale_1]*.nii.gz"), lausanne_1 - tuple val(sid), path("[lausanne_2008_scale_2]*.nii.gz"), lausanne_2 - tuple val(sid), path("[lausanne_2008_scale_3]*.nii.gz"), lausanne_3 - tuple val(sid), path("[lausanne_2008_scale_4]*.nii.gz"), lausanne_4 - tuple val(sid), path("[lausanne_2008_scale_5]*.nii.gz"), lausanne_5 + tuple val(sid), path("[lausanne_2008_scale_1]*.nii.gz"), emit: lausanne_1 + tuple val(sid), path("[lausanne_2008_scale_2]*.nii.gz"), emit: lausanne_2 + tuple val(sid), path("[lausanne_2008_scale_3]*.nii.gz"), emit: lausanne_3 + tuple 
val(sid), path("[lausanne_2008_scale_4]*.nii.gz"), emit: lausanne_4 + tuple val(sid), path("[lausanne_2008_scale_5]*.nii.gz"), emit: lausanne_5 path("*.txt") path("*.json") @@ -93,7 +93,7 @@ process LAUSANNE { """ ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ freesurfer_home=\$(dirname \$(dirname \$(which mri_label2vol))) - python3.7 $params.atlas_utils_folder/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ + python $params.atlas_utils_folder/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ \$(dirname ${folder}) ${sid} \$freesurfer_home --scale ${scale} --dilation_factor 0 --log_level DEBUG mri_convert ${folder}/mri/rawavg.mgz rawavg.nii.gz diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index e11e987..304e4b8 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -8,7 +8,7 @@ process FREESURFER { input: tuple val(sid), path(anat) output: - tuple val(sid), "$sid/", emit: folders + tuple val(sid), path("$sid/"), emit: folders script: """ diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index 777df93..bb21704 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -28,9 +28,8 @@ workflow FREESURFERFLOW { // ** Computing lausanne atlas ** // scales = Channel.from(1,2,3,4,5) - lausanne_channel = FREESURFER.out.folders - .combine(scales, by: 0) - LAUSANNE(FREESURFER.out.folders) + LAUSANNE(FREESURFER.out.folders, + scales) // ** Work out a way for the user to select which atlas to use. ** // // ** Could be cleaner than a bunch of if statements in the future. 
** // diff --git a/modules/io.nf b/modules/io.nf index 9099ce0..9d87df2 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -28,7 +28,8 @@ workflow get_data_freesurfer { input = file(params.input) // ** Loading files ** // - anat_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + anat_channel = Channel.fromPath("$input/**/*t1.nii.gz") + .map{ch1 -> [ch1.parent.name, ch1]} emit: anat = anat_channel diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index baa4880..8c03282 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -76,7 +76,7 @@ process LOCAL_TRACKING_MASK { export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - mrcalc $fa $params.local_fa_tracking_mask_threshold -ge ${sid}__local_tracking_mask.nii.gz\ + mrcalc $fa $params.local_fa_tracking_mask_thr -ge ${sid}__local_tracking_mask.nii.gz\ -datatype uint8 """ } @@ -102,7 +102,7 @@ process LOCAL_SEEDING_MASK { export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - mrcalc $fa $params.local_fa_seeding_mask_threshold -ge ${sid}__local_seeding_mask.nii.gz\ + mrcalc $fa $params.local_fa_seeding_mask_thr -ge ${sid}__local_seeding_mask.nii.gz\ -datatype uint8 """ } @@ -151,7 +151,7 @@ process PFT_SEEDING_MASK { export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - scil_image_math.py union $wm, $interface_mask ${sid}__pft_seeding_mask.nii.gz\ + scil_image_math.py union $wm $interface_mask ${sid}__pft_seeding_mask.nii.gz\ --data_type uint8 """ else if (params.pft_seeding_mask_type == "interface") @@ -163,7 +163,7 @@ process PFT_SEEDING_MASK { export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - mrcalc $fa $params.pft_fa_seeding_mask_threshold -ge 
${sid}__pft_seeding_mask.nii.gz\ + mrcalc $fa $params.pft_fa_seeding_mask_thr -ge ${sid}__pft_seeding_mask.nii.gz\ -datatype uint8 """ } @@ -198,6 +198,7 @@ process PFT_TRACKING { input: tuple val(sid), path(fodf), path(include), path(exclude), path(seed) + output: tuple val(sid), path("${sid}__pft_tracking.trk"), emit: tractogram when: @@ -212,7 +213,7 @@ process PFT_TRACKING { scil_compute_pft.py $fodf $seed $include $exclude\ tmp.trk\ --algo $params.pft_algo --$params.pft_seeding $params.pft_nbr_seeds\ - --seed $curr_seed --step $params.step_size --theta $params.theta\ + --seed $params.pft_random_seed --step $params.pft_step_size --theta $params.pft_theta\ --sfthres $params.pft_sfthres --sfthres_init $params.pft_sfthres_init\ --min_length $params.pft_min_len --max_length $params.pft_max_len\ --particles $params.pft_particles --back $params.pft_back\ diff --git a/nextflow.config b/nextflow.config index 16d7472..4d78dba 100644 --- a/nextflow.config +++ b/nextflow.config @@ -90,7 +90,7 @@ params { pft_fa_seeding_mask_thr = 0.1 pft_algo = "prob" - pft_nb_seeds = 10 + pft_nbr_seeds = 10 pft_seeding = "npv" pft_step_size = 0.5 pft_theta = 20 From 7e3ba9011db4cddeb0d6c18c69f29c49128f1061 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 26 Oct 2023 18:41:49 -0400 Subject: [PATCH 18/54] fix test --- .github/workflows/ci.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9605596..82988f9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,6 +5,14 @@ jobs: pipeline-compilation: runs-on: ubuntu-latest steps: + - name: Maximize build space + uses: easimon/maximize-build-space@master + with: + root-reserve-mb: 512 + swap-size-mb: 1024 + remove-codeql: 'true' + remove-android: 'true' + remove-dotnet: 'true' - uses: actions/checkout@v4 - uses: eWaterCycle/setup-apptainer@v2.0.0 - uses: nf-core/setup-nextflow@v1 From 
205b1b85701677b6e138993764b0c9fe5f42a93e Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 26 Oct 2023 19:04:28 -0400 Subject: [PATCH 19/54] test --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 82988f9..3d6e725 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,7 @@ jobs: remove-codeql: 'true' remove-android: 'true' remove-dotnet: 'true' - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: eWaterCycle/setup-apptainer@v2.0.0 - uses: nf-core/setup-nextflow@v1 - name: Build Docker Image. @@ -21,7 +21,7 @@ jobs: docker build -t childbrainflow_image ${GITHUB_WORKSPACE}/containers/. - name: Build Singularity Image. run: | - singularity build childbrainflow_image.sif ${GITHUB_WORKSPACE}/containers/singularity_recipe.def + apptainer build childbrainflow_image.sif ${GITHUB_WORKSPACE}/containers/singularity_recipe.def - name: Run pipeline with Docker run: | nextflow run ${GITHUB_WORKSPACE} --help -with-docker childbrainflow_image From 208bb4165d54df7a3d757bd37a7abc4eb78ebfa4 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 26 Oct 2023 19:16:47 -0400 Subject: [PATCH 20/54] try fix space issues on runners --- .github/workflows/ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3d6e725..0062e46 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,9 +16,11 @@ jobs: - uses: actions/checkout@v3 - uses: eWaterCycle/setup-apptainer@v2.0.0 - uses: nf-core/setup-nextflow@v1 + - name: Docker Setup Buildx + uses: docker/setup-buildx-action@v3.0.0 - name: Build Docker Image. run: | - docker build -t childbrainflow_image ${GITHUB_WORKSPACE}/containers/. 
+ docker buildx build -t childbrainflow_image ${GITHUB_WORKSPACE}/containers/. - name: Build Singularity Image. run: | apptainer build childbrainflow_image.sif ${GITHUB_WORKSPACE}/containers/singularity_recipe.def From a045bbc2a6862b45056919932023553f5cd5a918 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 26 Oct 2023 21:16:16 -0400 Subject: [PATCH 21/54] fix singularity and back to nextflow testing --- .github/workflows/ci.yml | 26 +++----------------------- containers/singularity_recipe.def | 2 +- 2 files changed, 4 insertions(+), 24 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0062e46..8dd8fee 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,28 +5,8 @@ jobs: pipeline-compilation: runs-on: ubuntu-latest steps: - - name: Maximize build space - uses: easimon/maximize-build-space@master - with: - root-reserve-mb: 512 - swap-size-mb: 1024 - remove-codeql: 'true' - remove-android: 'true' - remove-dotnet: 'true' - - uses: actions/checkout@v3 - - uses: eWaterCycle/setup-apptainer@v2.0.0 + - uses: actions/checkout@v4 - uses: nf-core/setup-nextflow@v1 - - name: Docker Setup Buildx - uses: docker/setup-buildx-action@v3.0.0 - - name: Build Docker Image. + - name: Run pipeline run: | - docker buildx build -t childbrainflow_image ${GITHUB_WORKSPACE}/containers/. - - name: Build Singularity Image. 
- run: | - apptainer build childbrainflow_image.sif ${GITHUB_WORKSPACE}/containers/singularity_recipe.def - - name: Run pipeline with Docker - run: | - nextflow run ${GITHUB_WORKSPACE} --help -with-docker childbrainflow_image - - name: Run pipeline with Singularity - run: | - nextflow run ${GITHUB_WORKSPACE} --help -with-singularity childbrainflow_image.sif \ No newline at end of file + nextflow run ${GITHUB_WORKSPACE} --help \ No newline at end of file diff --git a/containers/singularity_recipe.def b/containers/singularity_recipe.def index 6e32e38..8228d51 100644 --- a/containers/singularity_recipe.def +++ b/containers/singularity_recipe.def @@ -44,7 +44,7 @@ From: scilus/scilus:1.5.0 # Download additional files wget -O /freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 wget -O /root/FS_BN_GL_SF_utils.tar https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 - tar -xzvf FS_BN_GL_SF_utils.tar + tar -xvf FS_BN_GL_SF_utils.tar rm FS_BN_GL_SF_utils.tar %environment From d8b05cd967ad320d252c492a63775fa352584faf Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Fri, 27 Oct 2023 10:52:07 -0400 Subject: [PATCH 22/54] fix fast in docker and atlases --- .vscode/extensions.json | 5 -- containers/Dockerfile | 46 +++++++++---------- containers/singularity_recipe.def | 21 +++++---- modules/freesurfer/processes/atlases.nf | 3 +- .../tracking/processes/tracking_processes.nf | 2 +- nextflow.config | 2 +- 6 files changed, 38 insertions(+), 41 deletions(-) delete mode 100644 .vscode/extensions.json diff --git a/.vscode/extensions.json b/.vscode/extensions.json deleted file mode 100644 index 5de48d8..0000000 --- a/.vscode/extensions.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "recommendations": [ - "nf-core.nf-core-extensionpack" - ] -} \ No newline at end of file diff --git a/containers/Dockerfile b/containers/Dockerfile index 
57a5a5f..b7d0db2 100644 --- a/containers/Dockerfile +++ b/containers/Dockerfile @@ -1,4 +1,4 @@ -FROM scilus/scilus:1.5.0 +FROM scilus/scilus-flows:1.5.0 LABEL version="ChildBrainFlow-1.0.0" @@ -10,31 +10,31 @@ RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer rm fs.tar.gz RUN apt-get update && \ apt-get install csh tcsh +ADD https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 /root/license.txt +ADD https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 /root/ +RUN rm FS_BN_GL_SF_utils.tar # Setup freesurfer env -ENV OS Linux -ENV PATH /usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin -ENV FREESURFER_HOME /usr/local/freesurfer -ENV FREESURFER /usr/local/freesurfer -ENV SUBJECTS_DIR /usr/local/freesurfer/subjects -ENV LOCAL_DIR /usr/local/freesurfer/local -ENV FSFAST_HOME /usr/local/freesurfer/fsfast -ENV FMRI_ANALYSIS_DIR /usr/local/freesurfer/fsfast -ENV FUNCTIONALS_DIR /usr/local/freesurfer/sessions +ENV OS=Linux +ENV PATH=${PATH}:/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +ENV FREESURFER_HOME=/usr/local/freesurfer +ENV FREESURFER=/usr/local/freesurfer +ENV SUBJECTS_DIR=/usr/local/freesurfer/subjects +ENV LOCAL_DIR=/usr/local/freesurfer/local +ENV FSFAST_HOME=/usr/local/freesurfer/fsfast +ENV FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast +ENV FUNCTIONALS_DIR=/usr/local/freesurfer/sessions +ENV FS_LICENSE=/root/license.txt # set default fs options -ENV FS_OVERRIDE 0 -ENV FIX_VERTEX_AREA "" -ENV FSF_OUTPUT_FORMAT nii.gz +ENV FS_OVERRIDE=0 +ENV FIX_VERTEX_AREA="" +ENV FSF_OUTPUT_FORMAT=nii.gz # mni env requirements -ENV MINC_BIN_DIR /usr/local/freesurfer/mni/bin -ENV MINC_LIB_DIR 
/usr/local/freesurfer/mni/lib -ENV MNI_DIR /usr/local/freesurfer/mni -ENV MNI_DATAPATH /usr/local/freesurfer/mni/data -ENV MNI_PERL5LIB /usr/local/freesurfer/mni/share/perl5 -ENV PERL5LIB /usr/local/freesurfer/mni/share/perl5 - -ADD https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 /freesurfer/license.txt -ADD https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 /root/ -RUN rm FS_BN_GL_SF_utils.tar \ No newline at end of file +ENV MINC_BIN_DIR=/usr/local/freesurfer/mni/bin +ENV MINC_LIB_DIR=/usr/local/freesurfer/mni/lib +ENV MNI_DIR=/usr/local/freesurfer/mni +ENV MNI_DATAPATH=/usr/local/freesurfer/mni/data +ENV MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 +ENV PERL5LIB=/usr/local/freesurfer/mni/share/perl5 \ No newline at end of file diff --git a/containers/singularity_recipe.def b/containers/singularity_recipe.def index 8228d51..6067022 100644 --- a/containers/singularity_recipe.def +++ b/containers/singularity_recipe.def @@ -1,5 +1,5 @@ Bootstrap: docker -From: scilus/scilus:1.5.0 +From: scilus/scilus-flows:1.5.0 %labels version ChildBrainFlow-1.0.0 @@ -16,9 +16,15 @@ From: scilus/scilus:1.5.0 apt-get update apt-get install -y csh tcsh + # Download additional files + wget -O /root/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 + wget -O /root/FS_BN_GL_SF_utils.tar https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 + tar -xvf FS_BN_GL_SF_utils.tar + rm FS_BN_GL_SF_utils.tar + # Setup FreeSurfer environment export OS=Linux - export PATH=/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export 
PATH=${PATH}:/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin export FREESURFER_HOME=/usr/local/freesurfer export FREESURFER=/usr/local/freesurfer export SUBJECTS_DIR=/usr/local/freesurfer/subjects @@ -26,6 +32,7 @@ From: scilus/scilus:1.5.0 export FSFAST_HOME=/usr/local/freesurfer/fsfast export FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast export FUNCTIONALS_DIR=/usr/local/freesurfer/sessions + export FS_LICENSE=/root/license.txt # Set default FreeSurfer options export FS_OVERRIDE=0 @@ -40,16 +47,9 @@ From: scilus/scilus:1.5.0 export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 - - # Download additional files - wget -O /freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 - wget -O /root/FS_BN_GL_SF_utils.tar https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 - tar -xvf FS_BN_GL_SF_utils.tar - rm FS_BN_GL_SF_utils.tar - %environment export OS=Linux - export PATH=/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export PATH=${PATH}:/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin export FREESURFER_HOME=/usr/local/freesurfer export FREESURFER=/usr/local/freesurfer export SUBJECTS_DIR=/usr/local/freesurfer/subjects @@ -66,3 +66,4 @@ From: scilus/scilus:1.5.0 export MNI_DATAPATH=/usr/local/freesurfer/mni/data export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 + export FS_LICENSE=/root/license.txt diff --git a/modules/freesurfer/processes/atlases.nf 
b/modules/freesurfer/processes/atlases.nf index 17cc5e0..f4f0dae 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -9,8 +9,9 @@ process FS_BN_GL_SF { tuple val(sid), path(folder) output: - tuple val(sid), path("*[brainnetome]*.nii.gz"), emit: brainnetome tuple val(sid), path("*[freesurfer]*.nii.gz"), emit: freesurfer + tuple val(sid), path("*[brainnetome]*.nii.gz"), emit: brainnetome + tuple val(sid), path("*[glasser]*.nii.gz"), emit: glasser tuple val(sid), path("*[schaefer_100]*.nii.gz"), emit: schaefer_100 tuple val(sid), path("*[schaefer_200]*.nii.gz"), emit: schaefer_200 tuple val(sid), path("*[schaefer_400]*.nii.gz"), emit: schaefer_400 diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 8c03282..cb082db 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -125,7 +125,7 @@ process LOCAL_TRACKING { export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_compute_local_tracking.py $fodf $seeding_mask $tracking_mask\ - tmp.trk --algo $params.local_algo --$params.local_seeding $params.local_nb_seeds\ + tmp.trk --algo $params.local_algo --$params.local_seeding $params.local_nbr_seeds\ --seed $params.local_tracking_seed --step $params.local_step_size --theta $params.local_theta\ --sfthres $params.local_sfthres --min_length $params.local_min_len\ --max_length $params.local_max_len $compress --sh_basis $params.basis diff --git a/nextflow.config b/nextflow.config index 4d78dba..3b82a3b 100644 --- a/nextflow.config +++ b/nextflow.config @@ -114,7 +114,7 @@ params { local_seeding_mask_type = "wm" local_algo = "prob" - local_nb_seeds = 10 + local_nbr_seeds = 10 local_seeding = "npv" local_step_size = 0.5 local_theta = 20 From 75a8039668d131e795e5de43667080e843f8cc24 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 30 Oct 2023 
17:20:21 -0400 Subject: [PATCH 23/54] fixing utils download in container --- containers/Dockerfile | 14 ++++++++++---- containers/singularity_recipe.def | 17 ++++++++++++----- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/containers/Dockerfile b/containers/Dockerfile index b7d0db2..cc8afc1 100644 --- a/containers/Dockerfile +++ b/containers/Dockerfile @@ -2,17 +2,23 @@ FROM scilus/scilus-flows:1.5.0 LABEL version="ChildBrainFlow-1.0.0" +RUN wget https://www.dropbox.com/scl/fi/izy3yie5cdqqi7my2ogfa/FS_BN_GL_SF_utils.tar.gz?rlkey=kt8qi4ye7h3om3ymnq0n0h570&dl=0 -O FS_BN_GL_SF_utils.tar.gz && \ + tar -xvf FS_BN_GL_SF_utils.tar && \ + rm FS_BN_GL_SF_utils.tar + # Installing freesurfer on top of scilus:1.5.0 WORKDIR /root RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer-linux-centos7_x86_64-7.2.0.tar.gz -O fs.tar.gz && \ tar --no-same-owner -xzvf fs.tar.gz && \ mv freesurfer /usr/local && \ rm fs.tar.gz +COPY license.txt /usr/local/freesurfer/license.txt RUN apt-get update && \ apt-get install csh tcsh -ADD https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 /root/license.txt -ADD https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 /root/ -RUN rm FS_BN_GL_SF_utils.tar +RUN wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 \ + tar xjf parallel-latest.tar.bz2 \ + cd parallel-* && ./configure && make && make install \ + echo 'will cite' | parallel --citation 1> /dev/null 2> /dev/null & # Setup freesurfer env ENV OS=Linux @@ -24,7 +30,7 @@ ENV LOCAL_DIR=/usr/local/freesurfer/local ENV FSFAST_HOME=/usr/local/freesurfer/fsfast ENV FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast ENV FUNCTIONALS_DIR=/usr/local/freesurfer/sessions -ENV FS_LICENSE=/root/license.txt +ENV FS_LICENSE=/usr/local/freesurfer/license.txt # set default fs options ENV FS_OVERRIDE=0 diff --git a/containers/singularity_recipe.def b/containers/singularity_recipe.def index 
6067022..204f26a 100644 --- a/containers/singularity_recipe.def +++ b/containers/singularity_recipe.def @@ -17,10 +17,17 @@ From: scilus/scilus-flows:1.5.0 apt-get install -y csh tcsh # Download additional files - wget -O /root/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 - wget -O /root/FS_BN_GL_SF_utils.tar https://www.dropbox.com/s/zzptxby9pf5lpih/FS_BN_GL_SF_utils.tar?dl=0?download=1 - tar -xvf FS_BN_GL_SF_utils.tar + wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 + wget -O FS_BN_GL_SF_utils.tar.gz https://www.dropbox.com/scl/fi/izy3yie5cdqqi7my2ogfa/FS_BN_GL_SF_utils.tar.gz?rlkey=kt8qi4ye7h3om3ymnq0n0h570&dl=0 + tar -xvf FS_BN_GL_SF_utils.tar -C ${APPTAINER_ROOTFS}/ rm FS_BN_GL_SF_utils.tar + + # Setup parallel + wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 + tar xjf parallel-latest.tar.bz2 + cd parallel-* && ./configure + make && make install + echo 'will cite' | parallel --citation 1> /dev/null 2> /dev/null & # Setup FreeSurfer environment export OS=Linux @@ -32,7 +39,7 @@ From: scilus/scilus-flows:1.5.0 export FSFAST_HOME=/usr/local/freesurfer/fsfast export FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast export FUNCTIONALS_DIR=/usr/local/freesurfer/sessions - export FS_LICENSE=/root/license.txt + export FS_LICENSE=/usr/local/freesurfer/license.txt # Set default FreeSurfer options export FS_OVERRIDE=0 @@ -66,4 +73,4 @@ From: scilus/scilus-flows:1.5.0 export MNI_DATAPATH=/usr/local/freesurfer/mni/data export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 - export FS_LICENSE=/root/license.txt + export FS_LICENSE=/usr/local/freesurfer/license.txt From ab37b892c3fe84d9d65afe73f257e55175ec7669 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 30 Oct 2023 17:20:41 -0400 
Subject: [PATCH 24/54] fix typos and bindings --- main.nf | 4 ++-- modules/freesurfer/processes/atlases.nf | 17 +++++++++-------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/main.nf b/main.nf index 383f5ed..060a793 100644 --- a/main.nf +++ b/main.nf @@ -258,7 +258,7 @@ def display_usage () { "pft_seeding_mask_type":"$params.pft_seeding_mask_type", "pft_fa_seeding_mask_thr":"$params.pft_fa_seeding_mask_thr", "pft_algo":"$params.pft_algo", - "pft_nb_seeds":"$params.pft_nb_seeds", + "pft_nbr_seeds":"$params.pft_nbr_seeds", "pft_seeding":"$params.pft_seeding", "pft_step_size":"$params.pft_step_size", "pft_theta":"$params.pft_theta", @@ -279,7 +279,7 @@ def display_usage () { "local_tracking_mask_type":"$params.local_tracking_mask_type", "local_algo":"$params.local_algo", "local_seeding":"$params.local_seeding", - "local_nb_seeds":"$params.local_nb_seeds", + "local_nbr_seeds":"$params.local_nbr_seeds", "local_tracking_seed":"$params.local_tracking_seed", "local_step_size":"$params.local_step_size", "local_theta":"$params.local_theta", diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index f4f0dae..29c600a 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -58,15 +58,16 @@ process LOBES { scil_reshape_to_reference.py brain_mask.nii.gz rawavg.nii.gz brain_mask.nii.gz --interpolation nearest -f scil_image_math.py convert brain_mask.nii.gz brain_mask.nii.gz --data_type uint8 -f - scil_combine_labels.py atlas_lobes_v5.nii.gz -v wmparc.nii.gz 1003 1012 1014 1017 1018 1019 1020 1024 1027 1028 \ - 1032 -v wmparc.nii.gz 1008 1022 1025 1029 1031 -v wmparc.nii.gz 1005 1011 1013 1021 -v wmparc.nii.gz 1001 \ - 1006 1007 1009 1015 1015 1030 1033 -v wmparc.nii.gz 1002 1010 1023 1026 -v wmparc.nii.gz 8 -v wmparc.nii.gz \ - 10 11 12 13 17 18 26 28 -v wmparc.nii.gz 2003 2012 2014 2017 2018 2019 2020 2024 2027 2028 2032 \ - -v wmparc.nii.gz 2008 2022 2025 2029 2031 -v 
wmparc.nii.gz 2005 2011 2013 2021 -v wmparc.nii.gz 2001 2006 \ - 2007 2009 2015 2015 2030 2033 -v wmparc.nii.gz 2002 2010 2023 2026 -v wmparc.nii.gz 49 50 51 52 53 54 58 60 \ - -v wmparc.nii.gz 47 -v wmparc.nii.gz 16 --merge + scil_combine_labels.py atlas_lobes_v5.nii.gz --volume_ids wmparc.nii.gz 1003 1012 1014 1017 1018 1019 1020 \ + 1024 1027 1028 1032 --volume_ids wmparc.nii.gz 1008 1022 1025 1029 1031 --volume_ids wmparc.nii.gz 1005 \ + 1011 1013 1021 --volume_ids wmparc.nii.gz 1001 1006 1007 1009 1015 1015 1030 1033 --volume_ids wmparc.nii.gz \ + 1002 1010 1023 1026 --volume_ids wmparc.nii.gz 8 --volume_ids wmparc.nii.gz 10 11 12 13 17 18 26 28 \ + --volume_ids wmparc.nii.gz 2003 2012 2014 2017 2018 2019 2020 2024 2027 2028 2032 --volume_ids wmparc.nii.gz \ + 2008 2022 2025 2029 2031 --volume_ids wmparc.nii.gz 2005 2011 2013 2021 --volume_ids wmparc.nii.gz 2001 2006 \ + 2007 2009 2015 2015 2030 2033 --volume_ids wmparc.nii.gz 2002 2010 2023 2026 --volume_ids wmparc.nii.gz 49 50 \ + 51 52 53 54 58 60 --volume_ids wmparc.nii.gz 47 --volume_ids wmparc.nii.gz 16 --merge scil_dilate_labels.py atlas_lobes_v5.nii.gz atlas_lobes_v5_dilate.nii.gz --distance 2 \ - --label_to_dilate 1 2 3 4 5 6 8 9 10 11 12 14 15 --mask brain_mask.nii.gz + --labels_to_dilate 1 2 3 4 5 6 8 9 10 11 12 14 15 --mask brain_mask.nii.gz cp $params.atlas_utils_folder/freesurfer_utils/*lobes_v5* ./ """ } From d4bf895394d74dc55ff0c4f9550998ed6918930a Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 31 Oct 2023 21:11:01 -0400 Subject: [PATCH 25/54] explicited usage files, minor fix, improved ci --- .github/workflows/ci.yml | 11 +- README.md | 73 +++-- USAGE | 7 +- containers/license.txt | 4 - main.nf | 24 +- modules/connectomics/USAGE | 17 +- modules/connectomics/USAGE_ALL | 295 ++++++++++++++++++ modules/connectomics/USAGE_INFANT | 11 +- modules/connectomics/USAGE_TRACKING | 260 +++++++++++++++ modules/connectomics/USAGE_TRACKING_INFANT | 
259 +++++++++++++++ modules/freesurfer/USAGE | 18 +- modules/freesurfer/USAGE_CONN | 162 ++++++++++ modules/freesurfer/processes/atlases.nf | 22 +- .../freesurfer/workflows/freesurferflow.nf | 30 +- modules/tracking/USAGE | 34 +- modules/tracking/USAGE_INFANT | 26 +- modules/tracking/workflows/preprocessing.nf | 10 +- nextflow.config | 10 +- 18 files changed, 1122 insertions(+), 151 deletions(-) delete mode 100644 containers/license.txt create mode 100644 modules/connectomics/USAGE_ALL create mode 100644 modules/connectomics/USAGE_TRACKING create mode 100644 modules/connectomics/USAGE_TRACKING_INFANT create mode 100644 modules/freesurfer/USAGE_CONN diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8dd8fee..336cd7e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,4 +9,13 @@ jobs: - uses: nf-core/setup-nextflow@v1 - name: Run pipeline run: | - nextflow run ${GITHUB_WORKSPACE} --help \ No newline at end of file + nextflow run ${GITHUB_WORKSPACE} --help + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking,infant + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile connectomics + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile connectomics,infant + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking,connectomics + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking,connectomics,infant + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile freesurfer + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile freesurfer,connectomics + nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile freesurfer,connectomics,tracking \ No newline at end of file diff --git a/README.md b/README.md index d3665fa..44d08b6 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,33 @@ ChildBrainFlow Pipeline 
======================= -Complete pipeline to perform tractography from infant diffusion MRI data. Adapted from the SCIL TractoFlow Pipeline (https://github.com/scilus/tractoflow.git) and Connectoflow Pipeline (https://github.com/scilus/connectoflow.git). -SINGULARITY ------------ -If you are running this pipeline on Linux, it is recommended you use the SCIL singularity container that contains all the relevant dependencies. -You can get the image by running this command: +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -`` sudo singularity pull scilus.sif docker://scilus/scilus:latest`` + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) -DOCKER ------- -If you are on MacOS or Windows, you can use Docker to run Infant-DWI. -Prebuilt image are available here: +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** -https://hub.docker.com/r/scilus/scilus +APPTAINER +--------- +If you are running this pipeline on Linux, it is recommended to run the pipeline using an apptainer image. +The pipeline comes with a recipe file (`` /containers/apptainer_recipe.def ``) containing all the required +dependencies to successfully run every profiles. To build the apptainer image, run this command: -DEPENDENCIES ------------- -You can also run Infant-DWI without any container, but you need these dependencies installed on your machine to make it work: +`` sudo apptainer build . 
`` USAGE ----- @@ -33,10 +35,33 @@ See _USAGE_ or run `` nextflow run main.nf --help `` for more details. REFERENCES ---------- -This pipeline is adapted from the SCIL TractoFlow pipeline, see: +If you used this pipeline, please cite : + +[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., + TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline + leveraging Nextflow & Singularity, NeuroImage, + https://doi.org/10.1016/j.neuroimage.2020.116889. + +[2] Kurtzer GM, Sochat V, Bauer MW Singularity: Scientific containers for mobility of compute. PLoS ONE 12(5) + (2017): e0177459. https://doi.org/10.1371/journal.pone.0177459 + +[3] P. Di Tommaso, et al. Nextflow enables reproducible computational workflows. Nature Biotechnology 35, + 316–319 (2017) https://doi.org/10.1038/nbt.3820 + +[4] Garyfallidis, E., Brett, M., Amirbekian, B., Rokem, A., Van Der Walt, S., Descoteaux, M., Nimmo-Smith, I., + 2014. Dipy, a library for the analysis of diffusion mri data. Frontiers in neuroinformatics 8, 8. + https://doi.org/10.3389/fninf.2014.00008 + +[5] Tournier, J. D., Smith, R. E., Raffelt, D. A., Tabbara, R., Dhollander, T., Pietsch, M., … & Connelly, A. + (2019). MRtrix3: A fast, flexible and open software framework for medical image processing and visualisation. + NeuroImage 202, https://doi.org/10.1016/j.neuroimage.2019.116137 + +[6] Avants, B. B., Tustison, N., & Song, G. (2009). Advanced normalization tools (ANTS). Insight j, 2(365), 1-35. -Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. +[7] Jenkinson, M., Beckmann, C.F., Behrens, T.E., Woolrich, M.W., Smith, S.M., 2012. Fsl. Neuroimage 62, + 782–790. 
https://doi.org/10.1016/j.neuroimage.2011.09.015 +[8] Fischl, B., Salat, D.H., Busa, E., Albert, M., Dieterich, M., Haselgrove, C., van der Kouwe, A., Killiany, + R., Kennedy, D., Klaveness, S., Montillo, A., Makris, N., Rosen, B., Dale, A.M., 2002. Whole brain + segmentation: automated labeling of neuroanatomical structures in the human brain. Neuron 33, 341-355. + https://doi.org/10.1016/s0896-6273(02)00569-x diff --git a/USAGE b/USAGE index ef497f9..0e111d4 100644 --- a/USAGE +++ b/USAGE @@ -7,7 +7,7 @@ It is essentially a merged version of multiple individual pipeline to avoid the between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which process have been taken: - 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 1. TractoFlow (https://github.com/scilus/tractoflow.git) 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) @@ -45,8 +45,3 @@ To view the required input files, select all the profiles you want to run (ex: t and run this command : nextflow run ChildBrainFlow/main.nf --help -profile tracking,connectomics,infant - -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. 
diff --git a/containers/license.txt b/containers/license.txt deleted file mode 100644 index b05980a..0000000 --- a/containers/license.txt +++ /dev/null @@ -1,4 +0,0 @@ -anthony.gagnon7@usherbrooke.ca -50632 - *C/gExRm8XAxE - FSe2IOWbYUV/E diff --git a/main.nf b/main.nf index 060a793..804637d 100644 --- a/main.nf +++ b/main.nf @@ -195,19 +195,25 @@ if (!params.help) { def display_usage () { - if (params.run_tracking && !params.infant_config) { + if (params.run_tracking && !params.infant_config && !params.run_connectomics && !params.run_freesurfer ) { usage = file("$projectDir/modules/tracking/USAGE") - } - else if (params.run_tracking && params.infant_config) { + } else if (params.run_tracking && params.infant_config && !params.run_connectomics && !params.run_freesurfer ) { usage = file("$projectDir/modules/tracking/USAGE_INFANT") - } - else if (params.run_connectomics && !params.infant_config) { + } else if (params.run_connectomics && !params.infant_config && !params.run_tracking && !params.run_freesurfer ) { usage = file("$projectDir/modules/connectomics/USAGE") - } - else if (params.run_connectomics && params.infant_config) { + } else if (params.run_connectomics && params.infant_config && !params.run_tracking && !params.run_freesurfer ) { usage = file("$projectDir/modules/connectomics/USAGE_INFANT") - } - else { + } else if ( params.run_tracking && params.run_connectomics && !params.infant_config && !params.run_freesurfer ) { + usage = file("$projectDir/modules/connectomics/USAGE_TRACKING") + } else if ( params.run_tracking && params.run_connectomics && params.infant_config && !params.run_freesurfer ) { + usage = file("$projectDir/modules/connectomics/USAGE_TRACKING_INFANT") + } else if ( params.run_freesurfer && !params.run_tracking && !params.run_connectomics ) { + usage = file("$projectDir/modules/freesurfer/USAGE") + } else if ( params.run_freesurfer && !params.run_tracking && params.run_connectomics ) { + usage = 
file("$projectDir/modules/freesurfer/USAGE_CONN") + } else if ( params.run_freesurfer && params.run_tracking && params.run_connectomics ) { + usage = file("$projectDir/modules/connectomics/USAGE_ALL") + } else { usage = file("$projectDir/USAGE") } diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE index 79ca104..23389d6 100644 --- a/modules/connectomics/USAGE +++ b/modules/connectomics/USAGE @@ -7,19 +7,13 @@ It is essentially a merged version of multiple individual pipeline to avoid the between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which process have been taken: - 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 1. TractoFlow (https://github.com/scilus/tractoflow.git) 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) *** Please note that some steps have been removed from the original pipelines if they were not relevant *** *** for pediatric data. If you need some of these steps, please use the original pipelines. *** -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. - - Run Connectomics Pipeline nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics @@ -35,7 +29,7 @@ DESCRIPTION | ├-- *.bvec | |-- *t1.nii.gz [Registered to diff space.] | ├-- *.trk - | ├-- *labels.nii.gz [Native t1 space, optional if -profile freesurfer is used] + | ├-- *labels.nii.gz [Native t1 space] | ├-- *peaks.nii.gz | ├-- *fodf.nii.gz | ├-- OGenericAffine.mat @@ -48,7 +42,7 @@ DESCRIPTION ├-- *bvec |-- *t1.nii.gz [Registered to diff space.] 
├-- *.trk - ├-- *labels.nii.gz [Native t1 space, optional if -profile freesurfer is used] + ├-- *labels.nii.gz [Native t1 space] ├-- *peaks.nii.gz ├-- *fodf.nii.gz ├-- OGenericAffine.mat @@ -79,7 +73,8 @@ DESCRIPTION --ball_stick If set, will use the ball&stick model and disable the zeppelin compartment for single-shell data. ($ball_stick) --para_diff Parallel diffusivity in mm^2/s ($para_diff) - --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) PROCESSES OPTIONS --processes The number of parallel processes to launch ($cpu_count). @@ -92,7 +87,7 @@ DESCRIPTION OUTPUT OPTIONS --output_dir Directory to write the final results. Default is - "./Results_Infant_Tracking/". + "./Results_ChildBrainFlow/". AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL new file mode 100644 index 0000000..3924ca3 --- /dev/null +++ b/modules/connectomics/USAGE_ALL @@ -0,0 +1,295 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. 
*** + +Run Tracking Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | └-- *t1.nii.gz + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + └-- *t1.nii.gz + + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET T1 OPTIONS + --template_t1 Absolute path to the template T1 directory for antsBrainExtraction. + The folder must contain t1_template.nii.gz and + t1_brain_probability_map.nii.gz. The default path is the human_data + folder in the singularity container ($template_t1). + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. 
($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. 
+ --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + SEGMENT TISSUES OPTIONS + --number_of_tissues Number of tissues classes to segment. ($number_of_tissues) + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. 
($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRACKING OPTIONS + --run_pft_tracking If set, PFT tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. 
+ --processes_bet_t1 Number of processes for BET T1 task ($processes_bet_t1) + --processes_denoise_t1 Number of processes for T1 denoising task ($processes_denoise_t1) + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[FREESURFERFLOW OPTIONS] + + --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. + ($use_freesurfer_atlas) + --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. + This is the default setting. ($use_brainnetome_atlas) + --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. + ($use_glasser_atlas) + --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. + ($use_schaefer_100_atlas) + --use_schaefer_200_atlas If set, will use the Schaefer 200 atlas if -profile connectomics is used. + ($use_schaefer_200_atlas) + --use_schaefer_400_atlas If set, will use the Schaefer 400 atlas if -profile connectomics is used. + ($use_schaefer_400_atlas) + --use_lausanne_1_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_1_atlas) + --use_lausanne_2_atlas If set, will use the lausanne scale 2 atlas if -profile connectomics is + used. ($use_lausanne_2_atlas) + --use_lausanne_3_atlas If set, will use the lausanne scale 3 atlas if -profile connectomics is + used. ($use_lausanne_3_atlas) + --use_lausanne_4_atlas If set, will use the lausanne scale 4 atlas if -profile connectomics is + used. ($use_lausanne_4_atlas) + --use_lausanne_5_atlas If set, will use the lausanne scale 5 atlas if -profile connectomics is + used. 
($use_lausanne_5_atlas) + --use_dilated_labels If set, will use the dilated version of the atlas selected above. + ($use_dilated_labels) + --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases. Default is + the path of folder within the container. ($atlas_utils_folder) + --nb_threads Number of threads used by recon-all and the atlases creation + ($nb_threads) + --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) + * FreeSurfer (adapted) + * Brainnetome + * Glasser + * Schaefer (100/200/400) + --compute_lausanne_multiscale Compute the connectivity multiscale atlases from Lausanne + ($compute_lausanne_multiscale) + --compute_lobes Compute the lobes atlas. ($compute_lobes) + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be used rather than COMMIT1. ($use_commit2) + COMMIT2 output will replace the COMMIT1 output. + --b_thr Tolerance value to consider bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. 
($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/connectomics/USAGE_INFANT b/modules/connectomics/USAGE_INFANT index 854620d..bc606ca 100644 --- a/modules/connectomics/USAGE_INFANT +++ b/modules/connectomics/USAGE_INFANT @@ -7,19 +7,13 @@ It is essentially a merged version of multiple individual pipeline to avoid the between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which process have been taken: - 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 1. TractoFlow (https://github.com/scilus/tractoflow.git) 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) *** Please note that some steps have been removed from the original pipelines if they were not relevant *** *** for pediatric data. If you need some of these steps, please use the original pipelines. *** -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. - - Run Connectomics Pipeline Infant Config nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics,infant @@ -79,6 +73,7 @@ DESCRIPTION --ball_stick If set, will use the ball&stick model and disable the zeppelin compartment for single-shell data. ($ball_stick) --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) PROCESSES OPTIONS @@ -92,7 +87,7 @@ DESCRIPTION OUTPUT OPTIONS --output_dir Directory to write the final results. Default is - "./Results_Infant_Tracking/". + "./Results_ChildBrainFlow/". AVAILABLE PROFILES (using -profile option (e.g. 
-profile no_symlink,macos,tracking)) diff --git a/modules/connectomics/USAGE_TRACKING b/modules/connectomics/USAGE_TRACKING new file mode 100644 index 0000000..d8bc40e --- /dev/null +++ b/modules/connectomics/USAGE_TRACKING @@ -0,0 +1,260 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking and Connectomics Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | └-- *t1.nii.gz + | ├-- *labels.nii.gz [Native t1 space] + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t1.nii.gz + ├-- *labels.nii.gz [Native t1 space] + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. 
($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET T1 OPTIONS + --template_t1 Absolute path to the template T1 directory for antsBrainExtraction. + The folder must contain t1_template.nii.gz and + t1_brain_probability_map.nii.gz. The default path is the human_data + folder in the singularity container ($template_t1). + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. 
+ --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. 
($mean_frf) + USE ONLY IF ALL SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + SEGMENT TISSUES OPTIONS + --number_of_tissues Number of tissues classes to segment. ($number_of_tissues) + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. 
+ ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRACKING OPTIONS + --run_pft_tracking If set, PFT tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_bet_t1 Number of processes for BET T1 task ($processes_bet_t1) + --processes_denoise_t1 Number of processes for T1 denoising task ($processes_denoise_t1) + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. 
($processes_fodf) + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be used rather than COMMIT1. ($use_commit2) + COMMIT2 output will replace the COMMIT1 output. + --b_thr Tolerance value to consider bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. ($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. 
-profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/connectomics/USAGE_TRACKING_INFANT b/modules/connectomics/USAGE_TRACKING_INFANT new file mode 100644 index 0000000..f6a1799 --- /dev/null +++ b/modules/connectomics/USAGE_TRACKING_INFANT @@ -0,0 +1,259 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. 
Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking and Connectomics Pipeline Infant Config + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics,infant + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | ├-- *t2w.nii.gz + | ├-- *wm_mask.nii.gz + | ├-- *labels.nii.gz [Native t2w space.] + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t2w.nii.gz + ├-- *wm_mask.nii.gz + ├-- *labels.nii.gz [Native t2w space.] + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET ANAT OPTIONS + --run_bet_anat If set, will perform brain extraction on the input anat volume. + ($run_bet_anat) + Default settings are soft to make sure an already brain extracted volume + is not impacted + by the bet command. The goal is to clean volumes that still have + portions of non-brain structures. + --bet_anat_f Fractional intensity threshold for bet. 
($bet_anat_f) + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. 
+ ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. 
($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. 
($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --b_thr Tolerance value to considier bvalues to be the same shell. 
+ --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. ($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. 
+The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/freesurfer/USAGE b/modules/freesurfer/USAGE index 502bed9..c189400 100644 --- a/modules/freesurfer/USAGE +++ b/modules/freesurfer/USAGE @@ -7,19 +7,13 @@ It is essentially a merged version of multiple individual pipeline to avoid the between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which process have been taken: - 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 1. TractoFlow (https://github.com/scilus/tractoflow.git) 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) *** Please note that some steps have been removed from the original pipelines if they were not relevant *** *** for pediatric data. If you need some of these steps, please use the original pipelines. *** -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. - - Run FreeSurferFlow Pipeline nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile freesurfer @@ -64,8 +58,8 @@ OPTIONAL ARGUMENTS (current value) [FREESURFERFLOW OPTIONS] - --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases - ($atlas_util_folder) + --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases. Default is + the path of folder within the container. ($atlas_utils_folder) --nb_threads Number of threads used by recon-all and the atlases creation ($nb_threads) --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) @@ -79,6 +73,12 @@ OPTIONAL ARGUMENTS (current value) --processes The number of parallel processes to launch ($cpu_count). 
Only affects the local scheduler. +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) no_symlink When used, results will be directly copied in the output folder and diff --git a/modules/freesurfer/USAGE_CONN b/modules/freesurfer/USAGE_CONN new file mode 100644 index 0000000..33659d2 --- /dev/null +++ b/modules/freesurfer/USAGE_CONN @@ -0,0 +1,162 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run FreeSurferFlow Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile freesurfer,connectomics + +DESCRIPTION + + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | |-- *t1.nii.gz [Raw t1 image.] + | ├-- *.trk + | ├-- *peaks.nii.gz + | ├-- *fodf.nii.gz + | ├-- OGenericAffine.mat + | ├-- output1Warp.nii.gz + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + |-- *t1.nii.gz [Raw t1 image.] 
+ ├-- *.trk + ├-- *peaks.nii.gz + ├-- *fodf.nii.gz + ├-- OGenericAffine.mat + ├-- output1Warp.nii.gz + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + + --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. + ($use_freesurfer_atlas) + --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. + This is the default setting. ($use_brainnetome_atlas) + --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. + ($use_glasser_atlas) + --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. + ($use_schaefer_100_atlas) + --use_schaefer_200_atlas If set, will use the Schaefer 200 atlas if -profile connectomics is used. + ($use_schaefer_200_atlas) + --use_schaefer_400_atlas If set, will use the Schaefer 400 atlas if -profile connectomics is used. + ($use_schaefer_400_atlas) + --use_lausanne_1_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_1_atlas) + --use_lausanne_2_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_2_atlas) + --use_lausanne_3_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_3_atlas) + --use_lausanne_4_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_4_atlas) + --use_lausanne_5_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_5_atlas) + --use_dilated_labels If set, will use the dilated version of the atlas selected above. + ($use_dilated_labels) + + +OPTIONAL ARGUMENTS (current value) + +[FREESURFERFLOW OPTIONS] + + --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases. Default is + the path of folder within the container. 
($atlas_utils_folder) + --nb_threads Number of threads used by recon-all and the atlases creation + ($nb_threads) + --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) + * FreeSurfer (adapted) + * Brainnetome + * Glasser + * Schaefer (100/200/400) + --compute_lausanne_multiscale Compute the connectivity multiscale atlases from Lausanne + ($compute_lausanne_multiscale) + --compute_lobes Compute the lobes atlas. ($compute_lobes) + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --b_thr Tolerance value to considier bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. 
($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index 29c600a..85f9635 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -9,12 +9,12 @@ process FS_BN_GL_SF { tuple val(sid), path(folder) output: - tuple val(sid), path("*[freesurfer]*.nii.gz"), emit: freesurfer - tuple val(sid), path("*[brainnetome]*.nii.gz"), emit: brainnetome - tuple val(sid), path("*[glasser]*.nii.gz"), emit: glasser - tuple val(sid), path("*[schaefer_100]*.nii.gz"), emit: schaefer_100 - tuple val(sid), path("*[schaefer_200]*.nii.gz"), emit: schaefer_200 - tuple val(sid), path("*[schaefer_400]*.nii.gz"), emit: schaefer_400 + tuple val(sid), path("*freesurfer*.nii.gz"), emit: freesurfer + tuple val(sid), path("*brainnetome*.nii.gz"), emit: brainnetome + tuple val(sid), path("*glasser*.nii.gz"), emit: glasser + tuple val(sid), path("*schaefer_100*.nii.gz"), emit: schaefer_100 + tuple val(sid), path("*schaefer_200*.nii.gz"), emit: schaefer_200 + tuple val(sid), path("*schaefer_400*.nii.gz"), emit: schaefer_400 path("*[brainnetome,freesurfer,glasser,schaefer]*.txt") path("*[brainnetome,freesurfer,glasser,schaefer]*.json") @@ -80,11 +80,11 @@ process LAUSANNE { each scale output: - tuple val(sid), path("[lausanne_2008_scale_1]*.nii.gz"), emit: lausanne_1 - tuple val(sid), path("[lausanne_2008_scale_2]*.nii.gz"), emit: lausanne_2 - tuple val(sid), path("[lausanne_2008_scale_3]*.nii.gz"), emit: lausanne_3 - tuple val(sid), path("[lausanne_2008_scale_4]*.nii.gz"), emit: lausanne_4 - tuple val(sid), path("[lausanne_2008_scale_5]*.nii.gz"), emit: lausanne_5 + tuple val(sid), path("lausanne_2008_scale_1*.nii.gz"), emit: lausanne_1 + tuple val(sid), path("lausanne_2008_scale_2*.nii.gz"), emit: lausanne_2 + tuple val(sid), path("lausanne_2008_scale_3*.nii.gz"), emit: lausanne_3 + tuple val(sid), path("lausanne_2008_scale_4*.nii.gz"), emit: lausanne_4 + tuple val(sid), 
path("lausanne_2008_scale_5*.nii.gz"), emit: lausanne_5 path("*.txt") path("*.json") diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index bb21704..ea52f5a 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -41,8 +41,7 @@ workflow FREESURFERFLOW { labels = FS_BN_GL_SF.out.freesurfer .map{ [it[0], it[1]] } } - } - if ( params.use_brainnetome_atlas ) { + } else if ( params.use_brainnetome_atlas ) { if ( params.use_dilated_labels ) { labels = FS_BN_GL_SF.out.brainnetome .map{ [it[0], it[2]] } @@ -50,8 +49,7 @@ workflow FREESURFERFLOW { labels = FS_BN_GL_SF.out.brainnetome .map{ [it[0], it[1]] } } - } - if ( params.use_glasser_atlas ) { + } else if ( params.use_glasser_atlas ) { if ( params.use_dilated_labels ) { labels = FS_BN_GL_SF.out.glasser .map{ [it[0], it[2]] } @@ -59,8 +57,7 @@ workflow FREESURFERFLOW { labels = FS_BN_GL_SF.out.glasser .map{ [it[0], it[1]] } } - } - if ( params.use_schaefer_100_atlas ) { + } else if ( params.use_schaefer_100_atlas ) { if ( params.use_dilated_labels ) { labels = FS_BN_GL_SF.out.schaefer_100 .map{ [it[0], it[2]] } @@ -68,8 +65,7 @@ workflow FREESURFERFLOW { labels = FS_BN_GL_SF.out.schaefer_100 .map{ [it[0], it[1]] } } - } - if ( params.use_schaefer_200_atlas ) { + } else if ( params.use_schaefer_200_atlas ) { if ( params.use_dilated_labels ) { labels = FS_BN_GL_SF.out.schaefer_200 .map{ [it[0], it[2]] } @@ -77,8 +73,7 @@ workflow FREESURFERFLOW { labels = FS_BN_GL_SF.out.schaefer_200 .map{ [it[0], it[1]] } } - } - if ( params.use_schaefer_400_atlas ) { + } else if ( params.use_schaefer_400_atlas ) { if ( params.use_dilated_labels ) { labels = FS_BN_GL_SF.out.schaefer_400 .map{ [it[0], it[2]] } @@ -86,8 +81,7 @@ workflow FREESURFERFLOW { labels = FS_BN_GL_SF.out.schaefer_400 .map{ [it[0], it[1]] } } - } - if ( params.use_lausanne_1_atlas ) { + } else if ( params.use_lausanne_1_atlas ) { if ( 
params.use_dilated_labels ) { labels = LAUSANNE.out.lausanne_1 .map{ [it[0], it[2]] } @@ -95,8 +89,7 @@ workflow FREESURFERFLOW { labels = LAUSANNE.out.lausanne_1 .map{ [it[0], it[1]] } } - } - if ( params.use_lausanne_2_atlas ) { + } else if ( params.use_lausanne_2_atlas ) { if ( params.use_dilated_labels ) { labels = LAUSANNE.out.lausanne_2 .map{ [it[0], it[2]] } @@ -104,8 +97,7 @@ workflow FREESURFERFLOW { labels = LAUSANNE.out.lausanne_2 .map{ [it[0], it[1]] } } - } - if ( params.use_lausanne_3_atlas ) { + } else if ( params.use_lausanne_3_atlas ) { if ( params.use_dilated_labels ) { labels = LAUSANNE.out.lausanne_3 .map{ [it[0], it[2]] } @@ -113,8 +105,7 @@ workflow FREESURFERFLOW { labels = LAUSANNE.out.lausanne_3 .map{ [it[0], it[1]] } } - } - if ( params.use_lausanne_4_atlas ) { + } else if ( params.use_lausanne_4_atlas ) { if ( params.use_dilated_labels ) { labels = LAUSANNE.out.lausanne_4 .map{ [it[0], it[2]] } @@ -122,8 +113,7 @@ workflow FREESURFERFLOW { labels = LAUSANNE.out.lausanne_4 .map{ [it[0], it[1]] } } - } - if ( params.use_lausanne_5_atlas ) { + } else if ( params.use_lausanne_5_atlas ) { if ( params.use_dilated_labels ) { labels = LAUSANNE.out.lausanne_5 .map{ [it[0], it[2]] } diff --git a/modules/tracking/USAGE b/modules/tracking/USAGE index 09bfbea..f91c7d1 100644 --- a/modules/tracking/USAGE +++ b/modules/tracking/USAGE @@ -7,19 +7,13 @@ It is essentially a merged version of multiple individual pipeline to avoid the between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which process have been taken: - 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 1. TractoFlow (https://github.com/scilus/tractoflow.git) 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) *** Please note that some steps have been removed from the original pipelines if they were not relevant *** *** for pediatric data. 
If you need some of these steps, please use the original pipelines. *** -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. - - Run Tracking Pipeline nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking @@ -34,21 +28,14 @@ DESCRIPTION | ├-- *.bval | ├-- *.bvec | ├-- *revb0.nii.gz - | ├-- *t1w.nii.gz - | ├-- *labels.nii.gz [Required if -profile connectomics is selected, not required when -profile freesurfer is used.] - | └-- metrics - | └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also - | selected] + | └-- *t1.nii.gz └-- S2 ├-- *dwi.nii.gz ├-- *bval ├-- *bvec ├-- *revb0.nii.gz - ├-- *t1w.nii.gz - ├-- *labels.nii.gz [Required if -profile connectomics is selected, not required when -profile freesurfer is used.] - └-- metrics - └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also - selected] + └-- *t1.nii.gz + OPTIONAL ARGUMENTS (current value) @@ -66,7 +53,7 @@ OPTIONAL ARGUMENTS (current value) --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) BET T1 OPTIONS - --template_t1 Path to the template T1 directory for antsBrainExtraction. + --template_t1 Absolute path to the template T1 directory for antsBrainExtraction. The folder must contain t1_template.nii.gz and t1_brain_probability_map.nii.gz. The default path is the human_data folder in the singularity container ($template_t1). @@ -134,7 +121,7 @@ OPTIONAL ARGUMENTS (current value) FRF OPTIONS --mean_frf Mean the FRF of all subjects. ($mean_frf) - USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER + USE ONLY IF ALL SUBJECTS COME FROM THE SAME SCANNER AND HAVE THE SAME ACQUISITION. --fa Initial FA threshold to compute the frf. ($fa) --min_fa Minimum FA threshold to compute the frf. 
($min_fa) @@ -143,6 +130,9 @@ OPTIONAL ARGUMENTS (current value) --set_frf If selected, will manually set the frf. ($set_frf) --manual_frf FRF set manually (--manual_frf "$manual_frf") + SEGMENT TISSUES OPTIONS + --number_of_tissues Number of tissues classes to segment. ($number_of_tissues) + LOCAL SEEDING AND TRAKING OPTIONS --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) @@ -155,7 +145,7 @@ OPTIONAL ARGUMENTS (current value) --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant voxel in fa maps. ($local_erosion) --local_algo Tracking algorithm [prob, det]. ($local_algo) - --local_nb_seeds Number of seeds related to the seeding type param. ($local_nb_seeds) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) --local_seeding Seeding type [npv, nt]. ($local_seeding) --local_step_size Step size ($local_step_size) --local_theta Maximum angle between 2 steps. ($local_theta) @@ -174,7 +164,7 @@ OPTIONAL ARGUMENTS (current value) ($pft_fa_seeding_mask_thr) --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) --pft_algo Tracking algorithm [prob, det]. ($pft_algo) - --pft_nb_seeds Number of seeds related to the seeding type param. ($pft_nb_seeds) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) --pft_seeding Seeding type [npv, nt]. ($pft_seeding) --pft_step_size Step size ($pft_step_size) --pft_theta Maximum angle between 2 steps. ($pft_theta) @@ -189,6 +179,8 @@ OPTIONAL ARGUMENTS (current value) PROCESSES OPTIONS --processes The number of parallel processes to launch ($cpu_count). Only affects the local scheduler. 
+ --processes_bet_t1 Number of processes for BET T1 task ($processes_bet_t1) + --processes_denoise_t1 Number of processes for T1 denoising task ($processes_denoise_t1) --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) --processes_eddy Number of processes for EDDY task. ($processes_eddy) --processes_registration Number of processes for registration task. ($processes_registration) diff --git a/modules/tracking/USAGE_INFANT b/modules/tracking/USAGE_INFANT index d7c0bf4..fee0931 100644 --- a/modules/tracking/USAGE_INFANT +++ b/modules/tracking/USAGE_INFANT @@ -7,19 +7,13 @@ It is essentially a merged version of multiple individual pipeline to avoid the between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which process have been taken: - 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 1. TractoFlow (https://github.com/scilus/tractoflow.git) 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) *** Please note that some steps have been removed from the original pipelines if they were not relevant *** *** for pediatric data. If you need some of these steps, please use the original pipelines. *** -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. 
- - Run Tracking Pipeline Infant Config nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,infant @@ -35,22 +29,14 @@ DESCRIPTION | ├-- *.bvec | ├-- *revb0.nii.gz | ├-- *t2w.nii.gz - | ├-- *wm_mask.nii.gz - | ├-- *labels.nii.gz [Only if connectomics is also selected] - | └-- metrics - | └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also - | selected] + | └-- *wm_mask.nii.gz └-- S2 ├-- *dwi.nii.gz ├-- *bval ├-- *bvec ├-- *revb0.nii.gz ├-- *t2w.nii.gz - ├-- *wm_mask.nii.gz - ├-- *labels.nii.gz [Only if connectomics is also selected] - └-- metrics - └-- METRIC_NAME.nii.gz [Optional, only if connectomics is also - selected] + └-- *wm_mask.nii.gz OPTIONAL ARGUMENTS (current value) @@ -160,7 +146,7 @@ OPTIONAL ARGUMENTS (current value) --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant voxel in fa maps. ($local_erosion) --local_algo Tracking algorithm [prob, det]. ($local_algo) - --local_nb_seeds Number of seeds related to the seeding type param. ($local_nb_seeds) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) --local_seeding Seeding type [npv, nt]. ($local_seeding) --local_step_size Step size ($local_step_size) --local_theta Maximum angle between 2 steps. ($local_theta) @@ -179,7 +165,7 @@ OPTIONAL ARGUMENTS (current value) ($pft_fa_seeding_mask_thr) --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) --pft_algo Tracking algorithm [prob, det]. ($pft_algo) - --pft_nb_seeds Number of seeds related to the seeding type param. ($pft_nb_seeds) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) --pft_seeding Seeding type [npv, nt]. ($pft_seeding) --pft_step_size Step size ($pft_step_size) --pft_theta Maximum angle between 2 steps. ($pft_theta) @@ -203,7 +189,7 @@ OPTIONAL ARGUMENTS (current value) OUTPUT OPTIONS --output_dir Directory to write the final results. 
Default is - "./Results_Infant_Tracking/". + "./Results_ChildBrainFlow/". AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 7003f9c..07d6555 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -109,11 +109,13 @@ workflow ANAT { anat_channel main: - // ** Denoising ** // - DENOISE_T1(anat_channel) + if ( ! params.infant_config ) { + // ** Denoising ** // + DENOISE_T1(anat_channel) - // ** N4 ** // - N4_T1(DENOISE_T1.out.t1_denoised) + // ** N4 ** // + N4_T1(DENOISE_T1.out.t1_denoised) + } // ** Resampling ** // if ( params.infant_config ) { diff --git a/nextflow.config b/nextflow.config index 3b82a3b..473bf4c 100644 --- a/nextflow.config +++ b/nextflow.config @@ -140,10 +140,10 @@ params { no_pruning = false no_remove_loops = false no_remove_outliers = false - min_length = 10 + min_length = 20 max_length = 200 loop_max_angle = 330 - outlier_threshold = 0.4 + outlier_threshold = 0.5 //** COMMIT Options **// run_commit = true @@ -268,7 +268,11 @@ profiles { params.local_min_len = 15 params.local_fa_seeding_mask_thr = 0.1 - //** COMMIT Options **// + // ** DECOMPOSE Options ** // + min_length = 10 + outlier_threshold = 0.4 + + // ** COMMIT Options ** // params.run_commit = true params.use_commit2 = false params.commit_on_trk = true From 4df49b58cfb6a8df8b19f9644b40626e3ea844d6 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 31 Oct 2023 21:12:57 -0400 Subject: [PATCH 26/54] fix workspace in CI --- .github/workflows/ci.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 336cd7e..63d05a5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,12 +10,12 @@ jobs: - name: Run pipeline run: | nextflow run 
${GITHUB_WORKSPACE} --help - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking,infant - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile connectomics - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile connectomics,infant - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking,connectomics - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile tracking,connectomics,infant - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile freesurfer - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile freesurfer,connectomics - nextflow run ~/code/Repositories/Infant-DWI/main.nf --help -profile freesurfer,connectomics,tracking \ No newline at end of file + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking,infant + nextflow run ${GITHUB_WORKSPACE} --help -profile connectomics + nextflow run ${GITHUB_WORKSPACE} --help -profile connectomics,infant + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking,connectomics + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking,connectomics,infant + nextflow run ${GITHUB_WORKSPACE} --help -profile freesurfer + nextflow run ${GITHUB_WORKSPACE} --help -profile freesurfer,connectomics + nextflow run ${GITHUB_WORKSPACE} --help -profile freesurfer,connectomics,tracking \ No newline at end of file From 96ecd44e82624914eb708adbc35dc11bd077db6d Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 1 Nov 2023 11:18:38 -0400 Subject: [PATCH 27/54] complete docs, logs and links between flows --- main.nf | 81 +++++++++++++++---- .../{transform_labels.nf => transform.nf} | 17 ++++ .../connectomics/workflows/connectomics.nf | 23 ++++-- modules/freesurfer/processes/freesurfer.nf | 2 + modules/io.nf | 6 +- 5 files 
changed, 105 insertions(+), 24 deletions(-) rename modules/connectomics/processes/{transform_labels.nf => transform.nf} (54%) diff --git a/main.nf b/main.nf index 804637d..b7cb834 100644 --- a/main.nf +++ b/main.nf @@ -168,8 +168,10 @@ workflow { if ( params.run_freesurfer ) { labels = FREESURFERFLOW.out.labels + anat = FREESURFERFLOW.out.t1 } else { labels = data.labels + anat = data.anat } metrics = data.metrics.transpose().groupTuple() @@ -179,7 +181,7 @@ workflow { data.dwi_peaks, data.fodf, metrics, - data.t2w, + anat, data.transfos) } } @@ -355,12 +357,11 @@ def display_usage () { def display_run_info () { log.info "" - log.info "Infant-DWI pipeline" + log.info "ChildBrainFlow pipeline" log.info "========================" - log.info "Pipeline adapted from the SCIL Tractoflow pipeline " - log.info "(https://github.com/scilus/tractoflow.git) and the " - log.info "Connectoflow Pipeline (https://github.com/scilus/connectoflow.git)." - log.info "Made for use on newborn diffusion MRI data." + log.info "ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. " + log.info "It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs " + log.info "between flows with some parameters tuned for pediatric brain scans. 
" log.info "" log.info "Start time: $workflow.start" log.info "" @@ -384,14 +385,21 @@ def display_run_info () { log.info "GLOBAL OPTIONS" log.info "Threshold for b0: $params.b0_thr" log.info "DWI Shell Tolerance: $params.dwi_shell_tolerance" + log.info "Skip DWI preprocessing: $params.skip_dwi_preprocessing" log.info "" log.info "BET DWI OPTIONS" log.info "Initial fractional value for BET: $params.initial_bet_f" log.info "Finale fractional value for BET: $params.final_bet_f" log.info "" - log.info "BET T2W OPTIONS" - log.info "Run BET on T2W image: $params.run_bet_anat" - log.info "Fractional value for T2W BET: $params.bet_anat_f" + if ( params.infant_config ) { + log.info "BET T2W OPTIONS" + log.info "Run BET on T2W image: $params.run_bet_anat" + log.info "Fractional value for T2W BET: $params.bet_anat_f" + } + else { + log.info "BET T1W OPTIONS" + log.info "T1 Tempalte: $params.template_t1" + } log.info "" log.info "EDDY AND TOPUP OPTIONS" log.info "Configuration for topup: $params.topup_config" @@ -416,7 +424,12 @@ def display_run_info () { log.info "Interpolation method for DWI mask: $params.mask_dwi_interpolation" log.info "" log.info "EXTRACT DWI SHELLS OPTIONS" - log.info "Maximum DTI shell value: $params.max_dti_shell_value" + if ( params.dti_shells ) { + log.info "DTI shells: $params.dti_shells" + } + else { + log.info "Maximum DTI shell value: $params.max_dti_shell_value" + } log.info "" log.info "SH FITTING OPTIONS" log.info "Run SH fitting: $params.sh_fitting" @@ -425,6 +438,7 @@ def display_run_info () { log.info "" log.info "FODF OPTIONS" log.info "Minimum fODF shell value: $params.min_fodf_shell_value" + log.info "FODF Metrics A factor: $params.fodf_metrics_a_factor" log.info "Maximum FA value in ventricles: $params.max_fa_in_ventricle" log.info "Minimum MD value in ventricles: $params.min_md_in_ventricle" log.info "Relative threshold (RT): $params.relative_threshold" @@ -440,10 +454,10 @@ def display_run_info () { log.info "Set FRF: 
$params.set_frf" log.info "Manual FRF: $params.manual_frf" log.info "" + log.info "SEGMENT TISSUES OPTIONS" + log.info "Number of tissues: $params.number_of_tissues" + log.info "" log.info "SEEDING AND TRACKING OPTIONS" - log.info "Local tracking : $params.run_local_tracking" - log.info "PFT tracking: $params.run_pft_tracking" - if ( params.run_pft_tracking ) { log.info "Algorithm for tracking: $params.pft_algo" log.info "Number of seeds per voxel: $params.pft_nb_seeds" @@ -464,9 +478,12 @@ def display_run_info () { log.info "Maximum fiber length: $params.local_max_len" log.info "Compression: $params.local_compress_streamlines" } - log.info "" log.info "PROCESSES PER TASKS" + if ( !params.infant_config ) { + log.info "Processes for denoising T1: $params.processes_denoise_t1" + log.info "Processes for BET T1: $params.processes_bet_t1" + } log.info "Processes for denoising DWI: $params.processes_denoise_dwi" log.info "Processes for EDDY: $params.processes_eddy" log.info "Processes for registration: $params.processes_registration" @@ -474,6 +491,31 @@ def display_run_info () { log.info "" } + if ( params.run_freesurfer ) { + log.info "[Freesurfer Options]" + log.info "" + log.info "Atlas utils folder: $params.atlas_utils_folder" + log.info "Compute FS, BN, GL, SF: $params.compute_FS_BN_GL_SF" + log.info "Compute lobes: $params.compute_lobes" + log.info "Compute lausanne multiscale: $params.compute_lausanne_multiscale" + log.info "Number of threads: $params.nb_threads" + log.info "" + log.info "ATLAS SELECTION" + log.info "Use Freesurfer atlas: $params.use_freesurfer_atlas" + log.info "Use Brainnetome atlas: $params.use_brainnetome_atlas" + log.info "Use Glasser atlas: $params.use_glasser_atlas" + log.info "Use Schaefer 100 atlas: $params.use_schaefer_100_atlas" + log.info "Use Schaefer 200 atlas: $params.use_schaefer_200_atlas" + log.info "Use Schaefer 400 atlas: $params.use_schaefer_400_atlas" + log.info "Use Lausanne 1 atlas: $params.use_lausanne_1_atlas" + log.info 
"Use Lausanne 2 atlas: $params.use_lausanne_2_atlas" + log.info "Use Lausanne 3 atlas: $params.use_lausanne_3_atlas" + log.info "Use Lausanne 4 atlas: $params.use_lausanne_4_atlas" + log.info "Use Lausanne 5 atlas: $params.use_lausanne_5_atlas" + log.info "Use dilated labels: $params.use_dilated_labels" + log.info "" + } + if ( params.run_connectomics ) { log.info "[Connectomics Options]" log.info "" @@ -481,13 +523,20 @@ def display_run_info () { log.info "No pruning: $params.no_pruning" log.info "No remove loops: $params.no_remove_loops" log.info "No remove outliers: $params.no_remove_outliers" - log.info "Minimal outlier length: $params.min_length" - log.info "Maximal outlier lenght: $params.max_length" + log.info "Minimal length: $params.min_length" + log.info "Maximal length: $params.max_length" log.info "Maximum looping angle: $params.loop_max_angle" + log.info "Outlier treshold: $params.outlier_threshold" log.info "" log.info "COMMIT OPTIONS" + log.info "Run COMMIT: $params.run_commit" + log.info "Use COMMIT2: $params.use_commit2" + log.info "COMMIT on trk: $params.commit_on_trk" + log.info "B-value threshold: $params.b_thr" log.info "Number of directions: $params.nbr_dir" + log.info "Ball and stick: $params.ball_stick" log.info "Parallel diffusivity: $params.para_diff" + log.info "Perpendicular diffusivity: $params.perp_diff" log.info "Isotropic diffusivity: $params.iso_diff" log.info "" log.info "PROCESSES OPTIONS" diff --git a/modules/connectomics/processes/transform_labels.nf b/modules/connectomics/processes/transform.nf similarity index 54% rename from modules/connectomics/processes/transform_labels.nf rename to modules/connectomics/processes/transform.nf index 6de3dde..7d805db 100644 --- a/modules/connectomics/processes/transform_labels.nf +++ b/modules/connectomics/processes/transform.nf @@ -18,4 +18,21 @@ process TRANSFORM_LABELS { scil_image_math.py convert ${sid}__labels_warped.nii.gz ${sid}__labels_warped.nii.gz \ --data_type int16 -f """ +} + 
+process TRANSFORM_T1 { + cpus 1 + memory '2 GB' + + input: + tuple val(sid), path(t1), path(dwi), path(bval), path(bvec), path(mat), path(syn) + output: + tuple val(sid), path("${sid}__t1_warped.nii.gz"), emit: t1_warped + script: + """ + scil_extract_b0.py $dwi $bval $bvec b0.nii.gz --mean\ + --b0_thr $params.b0_thr --force_b0_threshold + antsApplyTransforms -d 3 -i $t1 -r b0.nii.gz -o ${sid}__t1_warped.nii.gz \ + -t $syn $mat -n Linear + """ } \ No newline at end of file diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index 8add5b8..0d0e9c3 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -2,7 +2,8 @@ nextflow.enable.dsl=2 -include { TRANSFORM_LABELS } from "../processes/transform_labels.nf" +include { TRANSFORM_LABELS; + TRANSFORM_T1 } from "../processes/transform.nf" include { DECOMPOSE_CONNECTIVITY } from "../processes/decompose.nf" include { COMMIT; COMMIT_ON_TRK } from "../processes/commit.nf" @@ -17,15 +18,27 @@ workflow CONNECTOMICS { dwi_peaks_channel fodf_channel metrics_channel - t2w_channel + anat_channel transfos_channel main: + // ** If -profile freesurfer, transform t1 to diff space. ** // + if ( params.run_freesurfer && !params.run_tracking ) { + t1_for_transfo = anat_channel + .combine(dwi_peaks_channel.map{ [it[0], it[1], it[2], it[3]] }, by: 0) + .combine(transfos_channel, by: 0) + TRANSFORM_T1(t1_for_transfo) + channel_for_transfo = labels_channel + .combine(TRANSFORM_T1.out.t1_warped, by: 0) + .combine(transfos_channel, by: 0) + } else { + channel_for_transfo = labels_channel + .combine(anat_channel, by: 0) + .combine(transfos_channel, by: 0) + } + // ** Transforming labels to diff space ** // - channel_for_transfo = labels_channel - .combine(t2w_channel, by: 0) - .combine(transfos_channel, by: 0) TRANSFORM_LABELS(channel_for_transfo) // ** If -profile infant is used, first part will be run. 
COMMIT1 is the only supported ** // diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index 304e4b8..5700e3f 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -9,10 +9,12 @@ process FREESURFER { tuple val(sid), path(anat) output: tuple val(sid), path("$sid/"), emit: folders + tuple val(sid), path("${sid}__final_t1.nii.gz"), emit: final_t1 script: """ export SUBJECTS_DIR=. recon-all -i $anat -s $sid -all -parallel -openmp $params.nb_threads + mri_convert $sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz """ } \ No newline at end of file diff --git a/modules/io.nf b/modules/io.nf index 9d87df2..4f48aef 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -175,7 +175,7 @@ workflow get_data_connectomics { { fetch_id(it.parent, input) } metrics_channel = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, maxDepth: 2) { it.parent.parent.name } - t2w_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + t1_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,output1Warp.nii.gz}", size: 2, flat: true) { fetch_id(it.parent, input) } @@ -189,7 +189,7 @@ workflow get_data_connectomics { dwi_peaks = dwi_peaks_channel fodf = fodf_channel metrics = metrics_channel - t2w = t2w_channel + anat = t1_channel transfos = transfos_channel } @@ -257,7 +257,7 @@ workflow get_data_connectomics_infant { dwi_peaks = dwi_peaks_channel fodf = fodf_channel metrics = metrics_channel - t2w = t2w_channel + anat = t2w_channel transfos = transfos_channel } From c4ff0acea079bdcb83dc7e130218706f903541aa Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 1 Nov 2023 11:19:20 -0400 Subject: [PATCH 28/54] add t1 as output --- modules/freesurfer/workflows/freesurferflow.nf | 1 + 1 file 
changed, 1 insertion(+) diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index ea52f5a..4780e27 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -125,4 +125,5 @@ workflow FREESURFERFLOW { emit: labels + t1 = FREESURFER.out.final_t1 } \ No newline at end of file From 53215cb60a05e8409d152a860cd7e3c742202d72 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 1 Nov 2023 11:23:09 -0400 Subject: [PATCH 29/54] fix typos --- main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main.nf b/main.nf index b7cb834..eb1817b 100644 --- a/main.nf +++ b/main.nf @@ -460,7 +460,7 @@ def display_run_info () { log.info "SEEDING AND TRACKING OPTIONS" if ( params.run_pft_tracking ) { log.info "Algorithm for tracking: $params.pft_algo" - log.info "Number of seeds per voxel: $params.pft_nb_seeds" + log.info "Number of seeds per voxel: $params.pft_nbr_seeds" log.info "Seeding method: $params.pft_seeding" log.info "Step size: $params.pft_step_size" log.info "Theta threshold: $params.pft_theta" @@ -470,7 +470,7 @@ def display_run_info () { } else { log.info "Algorithm for tracking: $params.local_algo" - log.info "Number of seeds per voxel: $params.local_nb_seeds" + log.info "Number of seeds per voxel: $params.local_nbr_seeds" log.info "Seeding method: $params.local_seeding" log.info "Step size: $params.local_step_size" log.info "Theta threshold: $params.local_theta" From 2b856bb3921c8dcdf226cd77fbc4d949c3394353 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:36:14 -0400 Subject: [PATCH 30/54] fix links --- containers/Dockerfile | 8 ++++---- containers/singularity_recipe.def | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/containers/Dockerfile b/containers/Dockerfile index cc8afc1..65aa8d8 
100644 --- a/containers/Dockerfile +++ b/containers/Dockerfile @@ -2,9 +2,9 @@ FROM scilus/scilus-flows:1.5.0 LABEL version="ChildBrainFlow-1.0.0" -RUN wget https://www.dropbox.com/scl/fi/izy3yie5cdqqi7my2ogfa/FS_BN_GL_SF_utils.tar.gz?rlkey=kt8qi4ye7h3om3ymnq0n0h570&dl=0 -O FS_BN_GL_SF_utils.tar.gz && \ - tar -xvf FS_BN_GL_SF_utils.tar && \ - rm FS_BN_GL_SF_utils.tar +RUN wget -O FS_BN_GL_SF_utils.tar.gz "https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0" && \ + tar -xzvf FS_BN_GL_SF_utils.tar.gz && \ + rm FS_BN_GL_SF_utils.tar.gz # Installing freesurfer on top of scilus:1.5.0 WORKDIR /root @@ -12,7 +12,7 @@ RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer tar --no-same-owner -xzvf fs.tar.gz && \ mv freesurfer /usr/local && \ rm fs.tar.gz -COPY license.txt /usr/local/freesurfer/license.txt +RUN wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 RUN apt-get update && \ apt-get install csh tcsh RUN wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 \ diff --git a/containers/singularity_recipe.def b/containers/singularity_recipe.def index 204f26a..9e2ba81 100644 --- a/containers/singularity_recipe.def +++ b/containers/singularity_recipe.def @@ -18,9 +18,9 @@ From: scilus/scilus-flows:1.5.0 # Download additional files wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 - wget -O FS_BN_GL_SF_utils.tar.gz https://www.dropbox.com/scl/fi/izy3yie5cdqqi7my2ogfa/FS_BN_GL_SF_utils.tar.gz?rlkey=kt8qi4ye7h3om3ymnq0n0h570&dl=0 - tar -xvf FS_BN_GL_SF_utils.tar -C ${APPTAINER_ROOTFS}/ - rm FS_BN_GL_SF_utils.tar + wget -O $APPTAINER_ROOTFS/FS_BN_GL_SF_utils.tar.gz https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0 && \ + tar -xzvf 
$APPTAINER_ROOTFS/FS_BN_GL_SF_utils.tar.gz && \ + rm FS_BN_GL_SF_utils.tar.gz # Setup parallel wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 From 99ae5817074ace93b403aafa217a5bde41dcc92a Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:38:59 -0400 Subject: [PATCH 31/54] change image filename --- containers/{singularity_recipe.def => apptainer_recipe.def} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename containers/{singularity_recipe.def => apptainer_recipe.def} (100%) diff --git a/containers/singularity_recipe.def b/containers/apptainer_recipe.def similarity index 100% rename from containers/singularity_recipe.def rename to containers/apptainer_recipe.def From 60897b6ecbcf99b5049c4b711ebc0f7e27539e48 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 2 Nov 2023 13:43:05 -0400 Subject: [PATCH 32/54] change output structure, readme and containers --- README.md | 12 +- containers/Dockerfile | 3 +- containers/apptainer_recipe.def | 7 +- main.nf | 201 +----------------- modules/connectomics/processes/commit.nf | 12 +- .../connectomics/processes/compute_metrics.nf | 12 +- modules/connectomics/processes/decompose.nf | 6 +- modules/connectomics/processes/transform.nf | 11 +- modules/connectomics/processes/viz.nf | 6 +- modules/freesurfer/processes/atlases.nf | 23 +- modules/freesurfer/processes/freesurfer.nf | 5 + modules/io.nf | 195 +++++++++++++++++ modules/tracking/processes/DTI_processes.nf | 30 ++- modules/tracking/processes/FODF_processes.nf | 57 +++-- modules/tracking/processes/SH_processes.nf | 12 +- modules/tracking/processes/preprocess.nf | 101 +++++++-- .../processes/registration_processes.nf | 12 +- .../tracking/processes/tracking_processes.nf | 48 ++++- nextflow.config | 16 +- 19 files changed, 489 insertions(+), 280 deletions(-) diff --git a/README.md b/README.md index 44d08b6..7e65e7b 100644 --- a/README.md 
+++ b/README.md @@ -10,8 +10,16 @@ process have been taken: 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) -*** Please note that some steps have been removed from the original pipelines if they were not relevant *** -*** for pediatric data. If you need some of these steps, please use the original pipelines. *** +*** Please note that some steps have been removed from the original pipelines if they were not relevant for pediatric data. If you need some of these steps, please use the original pipelines. *** + +NEXTFLOW +-------- +To install nextflow, please see : https://www.nextflow.io/docs/latest/getstarted.html#requirements + +The pipeline export by default a `` parameters.json `` within the output directory to provide a documentation of the parameters used during the execution. For a more detailed report (excluding execution's parameters), +the default feature of nextflow `` -with-report `` can be used to export a html report. 
Simply had this your command line when launching the pipeline: + +`` nextflow run main.nf --input -with-report `` APPTAINER --------- diff --git a/containers/Dockerfile b/containers/Dockerfile index 65aa8d8..c63bb98 100644 --- a/containers/Dockerfile +++ b/containers/Dockerfile @@ -14,7 +14,8 @@ RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer rm fs.tar.gz RUN wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 RUN apt-get update && \ - apt-get install csh tcsh + apt-get install csh tcsh && \ + apt-get install libglu1-mesa RUN wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 \ tar xjf parallel-latest.tar.bz2 \ cd parallel-* && ./configure && make && make install \ diff --git a/containers/apptainer_recipe.def b/containers/apptainer_recipe.def index 9e2ba81..46cc309 100644 --- a/containers/apptainer_recipe.def +++ b/containers/apptainer_recipe.def @@ -16,10 +16,13 @@ From: scilus/scilus-flows:1.5.0 apt-get update apt-get install -y csh tcsh + # Install libGLU + apt-get install -y libglu1-mesa + # Download additional files wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 - wget -O $APPTAINER_ROOTFS/FS_BN_GL_SF_utils.tar.gz https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0 && \ - tar -xzvf $APPTAINER_ROOTFS/FS_BN_GL_SF_utils.tar.gz && \ + wget -O FS_BN_GL_SF_utils.tar.gz "https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0" && \ + tar -xzvf FS_BN_GL_SF_utils.tar.gz -C $APPTAINER_ROOTFS/ && \ rm FS_BN_GL_SF_utils.tar.gz # Setup parallel diff --git a/main.nf b/main.nf index eb1817b..fdd7887 100644 --- a/main.nf +++ b/main.nf @@ -1,5 +1,7 @@ #!/usr/bin/env nextflow +import groovy.json.JsonOutput + nextflow.enable.dsl=2 
params.help = false @@ -11,7 +13,8 @@ include { fetch_id; get_data_tracking_infant; get_data_connectomics; get_data_connectomics_infant; - get_data_template } from "./modules/io.nf" + get_data_template; + display_run_info } from "./modules/io.nf" include { DWI; ANAT } from "./modules/tracking/workflows/preprocessing.nf" include { DTI } from "./modules/tracking/workflows/DTI.nf" @@ -189,6 +192,8 @@ workflow { if (!params.help) { workflow.onComplete = { + jsonStr = JsonOutput.toJson(params) + file("${params.output_dir}/parameters.json").text = JsonOutput.prettyPrint(jsonStr) log.info "Pipeline completed at : $workflow.complete" log.info "Execution status : ${ workflow.success ? 'COMPLETED' : 'FAILED'}" log.info "Execution duration : $workflow.duration" @@ -242,10 +247,12 @@ def display_usage () { "dwi_resolution":"$params.dwi_resolution", "dwi_interpolation":"$params.dwi_interpolation", "mask_dwi_interpolation":"$params.mask_dwi_interpolation", + "dti_shells":"$params.dti_shells", "max_dti_shell_value":"$params.max_dti_shell_value", "sh_fitting":"$params.sh_fitting", "sh_fitting_order":"$params.sh_fitting_order", "sh_fitting_basis":"$params.sh_fitting_basis", + "fodf_shells":"$params.fodf_shells", "min_fodf_shell_value":"$params.min_fodf_shell_value", "fodf_metrics_a_facotr":"$params.fodf_metrics_a_factor", "max_fa_in_ventricle":"$params.max_fa_in_ventricle", @@ -353,196 +360,4 @@ def display_usage () { template = engine.createTemplate(usage.text).make(bindings) print template.toString() -} - -def display_run_info () { - log.info "" - log.info "ChildBrainFlow pipeline" - log.info "========================" - log.info "ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. " - log.info "It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs " - log.info "between flows with some parameters tuned for pediatric brain scans. 
" - log.info "" - log.info "Start time: $workflow.start" - log.info "" - - log.debug "[Command-line]" - log.debug "$workflow.commandLine" - log.debug "" - - log.info "[Git Info]" - log.info "$workflow.repository - $workflow.revision [$workflow.commitId]" - log.info "" - - log.info "[Inputs]" - log.info "Input: $params.input" - log.info "Output Directory: $params.output_dir" - log.info "" - - if ( params.run_tracking ) { - log.info "[Tracking Options]" - log.info "" - log.info "GLOBAL OPTIONS" - log.info "Threshold for b0: $params.b0_thr" - log.info "DWI Shell Tolerance: $params.dwi_shell_tolerance" - log.info "Skip DWI preprocessing: $params.skip_dwi_preprocessing" - log.info "" - log.info "BET DWI OPTIONS" - log.info "Initial fractional value for BET: $params.initial_bet_f" - log.info "Finale fractional value for BET: $params.final_bet_f" - log.info "" - if ( params.infant_config ) { - log.info "BET T2W OPTIONS" - log.info "Run BET on T2W image: $params.run_bet_anat" - log.info "Fractional value for T2W BET: $params.bet_anat_f" - } - else { - log.info "BET T1W OPTIONS" - log.info "T1 Tempalte: $params.template_t1" - } - log.info "" - log.info "EDDY AND TOPUP OPTIONS" - log.info "Configuration for topup: $params.topup_config" - log.info "Encoding direction: $params.encoding_direction" - log.info "Readout: $params.readout" - log.info "Topup prefix: $params.topup_prefix" - log.info "Topup BET fractional value: $params.topup_bet_f" - log.info "Eddy command: $params.eddy_cmd" - log.info "Run slice drop correction: $params.use_slice_drop_correction" - log.info "" - log.info "NORMALIZE OPTIONS" - log.info "FA threshold for masking: $params.fa_mask_threshold" - log.info "" - log.info "RESAMPLE ANAT OPTIONS" - log.info "Resampling resolution for Anatomical file: $params.anat_resolution" - log.info "Interpolation method for Anatomical file: $params.anat_interpolation" - log.info "Interpolation method for masks: $params.mask_interpolation" - log.info "" - log.info "RESAMPLE 
DWI OPTIONS" - log.info "Resampling resolution for DWI: $params.dwi_resolution" - log.info "Interpolation method for DWI: $params.dwi_interpolation" - log.info "Interpolation method for DWI mask: $params.mask_dwi_interpolation" - log.info "" - log.info "EXTRACT DWI SHELLS OPTIONS" - if ( params.dti_shells ) { - log.info "DTI shells: $params.dti_shells" - } - else { - log.info "Maximum DTI shell value: $params.max_dti_shell_value" - } - log.info "" - log.info "SH FITTING OPTIONS" - log.info "Run SH fitting: $params.sh_fitting" - log.info "SH fitting order: $params.sh_fitting_order" - log.info "SH fitting basis: $params.sh_fitting_basis" - log.info "" - log.info "FODF OPTIONS" - log.info "Minimum fODF shell value: $params.min_fodf_shell_value" - log.info "FODF Metrics A factor: $params.fodf_metrics_a_factor" - log.info "Maximum FA value in ventricles: $params.max_fa_in_ventricle" - log.info "Minimum MD value in ventricles: $params.min_md_in_ventricle" - log.info "Relative threshold (RT): $params.relative_threshold" - log.info "SH basis: $params.basis" - log.info "SH order: $params.sh_order" - log.info "" - log.info "FRF OPTIONS" - log.info "Run mean FRF: $params.mean_frf" - log.info "FA threshold for single fiber voxel: $params.fa" - log.info "Minimum FA for selecting voxel: $params.min_fa" - log.info "Minimum number of voxels: $params.min_nvox" - log.info "ROI radius: $params.roi_radius" - log.info "Set FRF: $params.set_frf" - log.info "Manual FRF: $params.manual_frf" - log.info "" - log.info "SEGMENT TISSUES OPTIONS" - log.info "Number of tissues: $params.number_of_tissues" - log.info "" - log.info "SEEDING AND TRACKING OPTIONS" - if ( params.run_pft_tracking ) { - log.info "Algorithm for tracking: $params.pft_algo" - log.info "Number of seeds per voxel: $params.pft_nbr_seeds" - log.info "Seeding method: $params.pft_seeding" - log.info "Step size: $params.pft_step_size" - log.info "Theta threshold: $params.pft_theta" - log.info "Minimum fiber length: 
$params.pft_min_len" - log.info "Maximum fiber length: $params.pft_max_len" - log.info "Compression: $params.pft_compress_streamlines" - } - else { - log.info "Algorithm for tracking: $params.local_algo" - log.info "Number of seeds per voxel: $params.local_nbr_seeds" - log.info "Seeding method: $params.local_seeding" - log.info "Step size: $params.local_step_size" - log.info "Theta threshold: $params.local_theta" - log.info "Minimum fiber length: $params.local_min_len" - log.info "Maximum fiber length: $params.local_max_len" - log.info "Compression: $params.local_compress_streamlines" - } - log.info "" - log.info "PROCESSES PER TASKS" - if ( !params.infant_config ) { - log.info "Processes for denoising T1: $params.processes_denoise_t1" - log.info "Processes for BET T1: $params.processes_bet_t1" - } - log.info "Processes for denoising DWI: $params.processes_denoise_dwi" - log.info "Processes for EDDY: $params.processes_eddy" - log.info "Processes for registration: $params.processes_registration" - log.info "Processes for FODF: $params.processes_fodf" - log.info "" - } - - if ( params.run_freesurfer ) { - log.info "[Freesurfer Options]" - log.info "" - log.info "Atlas utils folder: $params.atlas_utils_folder" - log.info "Compute FS, BN, GL, SF: $params.compute_FS_BN_GL_SF" - log.info "Compute lobes: $params.compute_lobes" - log.info "Compute lausanne multiscale: $params.compute_lausanne_multiscale" - log.info "Number of threads: $params.nb_threads" - log.info "" - log.info "ATLAS SELECTION" - log.info "Use Freesurfer atlas: $params.use_freesurfer_atlas" - log.info "Use Brainnetome atlas: $params.use_brainnetome_atlas" - log.info "Use Glasser atlas: $params.use_glasser_atlas" - log.info "Use Schaefer 100 atlas: $params.use_schaefer_100_atlas" - log.info "Use Schaefer 200 atlas: $params.use_schaefer_200_atlas" - log.info "Use Schaefer 400 atlas: $params.use_schaefer_400_atlas" - log.info "Use Lausanne 1 atlas: $params.use_lausanne_1_atlas" - log.info "Use Lausanne 2 
atlas: $params.use_lausanne_2_atlas" - log.info "Use Lausanne 3 atlas: $params.use_lausanne_3_atlas" - log.info "Use Lausanne 4 atlas: $params.use_lausanne_4_atlas" - log.info "Use Lausanne 5 atlas: $params.use_lausanne_5_atlas" - log.info "Use dilated labels: $params.use_dilated_labels" - log.info "" - } - - if ( params.run_connectomics ) { - log.info "[Connectomics Options]" - log.info "" - log.info "DECOMPOSE OPTIONS" - log.info "No pruning: $params.no_pruning" - log.info "No remove loops: $params.no_remove_loops" - log.info "No remove outliers: $params.no_remove_outliers" - log.info "Minimal length: $params.min_length" - log.info "Maximal length: $params.max_length" - log.info "Maximum looping angle: $params.loop_max_angle" - log.info "Outlier treshold: $params.outlier_threshold" - log.info "" - log.info "COMMIT OPTIONS" - log.info "Run COMMIT: $params.run_commit" - log.info "Use COMMIT2: $params.use_commit2" - log.info "COMMIT on trk: $params.commit_on_trk" - log.info "B-value threshold: $params.b_thr" - log.info "Number of directions: $params.nbr_dir" - log.info "Ball and stick: $params.ball_stick" - log.info "Parallel diffusivity: $params.para_diff" - log.info "Perpendicular diffusivity: $params.perp_diff" - log.info "Isotropic diffusivity: $params.iso_diff" - log.info "" - log.info "PROCESSES OPTIONS" - log.info "Number of processes for COMMIT: $params.processes_commit" - log.info "Number of processes for AFD_FIXEL: $params.processes_afd_fixel" - log.info "Number of processes for CONNECTIVITY: $params.processes_connectivity" - log.info "" - } } \ No newline at end of file diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 1c73aeb..e994009 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -5,7 +5,11 @@ nextflow.enable.dsl=2 process COMMIT { cpus params.processes_commit memory params.commit_memory_limit - label "COMMIT" + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'symlink' + } input: tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks) @@ -43,9 +47,13 @@ process COMMIT { } process COMMIT_ON_TRK { - label "COMMIT" cpus params.processes_commit memory params.commit_memory_limit + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'symlink' + } input: tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) diff --git a/modules/connectomics/processes/compute_metrics.nf b/modules/connectomics/processes/compute_metrics.nf index a4d96c6..24636d5 100644 --- a/modules/connectomics/processes/compute_metrics.nf +++ b/modules/connectomics/processes/compute_metrics.nf @@ -5,7 +5,11 @@ nextflow.enable.dsl=2 process COMPUTE_AFD_FIXEL { cpus params.processes_afd_fixel memory '2 GB' - label "COMPUTE_AFD_FIXEL" + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Connectomics/AFD_Fixel/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/AFD_Fixel/", mode: 'symlink' + } input: tuple val(sid), path(h5), path(fodf) @@ -21,7 +25,11 @@ process COMPUTE_AFD_FIXEL { process COMPUTE_CONNECTIVITY { cpus params.processes_connectivity memory '2 GB' - label "COMPUTE_CONNECTIVITY" + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Connectivity_Metrics/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Connectivity_Metrics/", mode: 'symlink' + } input: tuple val(sid), path(h5), path(labels), path(metrics) diff --git a/modules/connectomics/processes/decompose.nf b/modules/connectomics/processes/decompose.nf index a384319..06c17a6 100644 --- a/modules/connectomics/processes/decompose.nf +++ b/modules/connectomics/processes/decompose.nf @@ -5,7 +5,11 @@ nextflow.enable.dsl=2 process DECOMPOSE_CONNECTIVITY { cpus 1 memory { 7.B * trk.size() } - label "DECOMPOSE_CONNECTIVITY" + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Decompose/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Decompose/", mode: 'symlink' + } input: tuple val(sid), path(trk), path(labels) diff --git a/modules/connectomics/processes/transform.nf b/modules/connectomics/processes/transform.nf index 7d805db..5420f81 100644 --- a/modules/connectomics/processes/transform.nf +++ b/modules/connectomics/processes/transform.nf @@ -5,7 +5,11 @@ nextflow.enable.dsl=2 process TRANSFORM_LABELS { cpus 1 memory '2 GB' - label "TRANSFORM_LABELS" + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Transform_Labels/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Transform_Labels/", mode: 'symlink' + } input: tuple val(sid), path(labels), path(t2), path(mat), path(syn) @@ -23,6 +27,11 @@ process TRANSFORM_LABELS { process TRANSFORM_T1 { cpus 1 memory '2 GB' + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Transform_T1/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Transform_T1/", mode: 'symlink' + } input: tuple val(sid), path(t1), path(dwi), path(bval), path(bvec), path(mat), path(syn) diff --git a/modules/connectomics/processes/viz.nf b/modules/connectomics/processes/viz.nf index 29ae3d4..bacc907 100644 --- a/modules/connectomics/processes/viz.nf +++ b/modules/connectomics/processes/viz.nf @@ -4,8 +4,12 @@ nextflow.enable.dsl=2 process VISUALIZE_CONNECTIVITY { cpus 1 - label "VIZ" memory "2 GB" + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Connectomics/Visualize_Connectivity/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Connectomics/Visualize_Connectivity/", mode: 'symlink' + } input: tuple val(sid), path(npy) diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index 85f9635..c77352d 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -4,6 +4,11 @@ nextflow.enable.dsl=2 process FS_BN_GL_SF { cpus params.nb_threads + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Freesurfer/FS_BN_GL_SF/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Freesurfer/FS_BN_GL_SF/", mode: 'symlink' + } input: tuple val(sid), path(folder) @@ -32,6 +37,11 @@ process FS_BN_GL_SF { process LOBES { cpus params.nb_threads + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Freesurfer/Lobes/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Freesurfer/Lobes/", mode: 'symlink' + } input: tuple val(sid), path(folder) @@ -74,17 +84,18 @@ process LOBES { process LAUSANNE { cpus 1 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/Freesurfer/Lausanne/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Freesurfer/Lausanne/", mode: 'symlink' + } input: tuple val(sid), path(folder) each scale output: - tuple val(sid), path("lausanne_2008_scale_1*.nii.gz"), emit: lausanne_1 - tuple val(sid), path("lausanne_2008_scale_2*.nii.gz"), emit: lausanne_2 - tuple val(sid), path("lausanne_2008_scale_3*.nii.gz"), emit: lausanne_3 - tuple val(sid), path("lausanne_2008_scale_4*.nii.gz"), emit: lausanne_4 - tuple val(sid), path("lausanne_2008_scale_5*.nii.gz"), emit: lausanne_5 + tuple val(sid), path("lausanne_2008_scale_${scale}*.nii.gz"), emit: lausanne_${scale} path("*.txt") path("*.json") @@ -111,5 +122,3 @@ process LAUSANNE { cp $params.atlas_utils_folder/lausanne_multi_scale_atlas/*.json ./ """ } - - diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index 5700e3f..81e1904 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -4,6 +4,11 @@ nextflow.enable.dsl=2 process FREESURFER { cpus params.nb_threads + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Freesurfer/Freesurfer/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Freesurfer/Freesurfer/", mode: 'symlink' + } input: tuple val(sid), path(anat) diff --git a/modules/io.nf b/modules/io.nf index 4f48aef..3f45e2a 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -306,4 +306,199 @@ workflow get_data_template { fa = fa_channel anat_ref = anat_ref fa_ref = fa_ref +} + +def display_run_info () { + log.info "" + log.info "ChildBrainFlow pipeline" + log.info "========================" + log.info "ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. 
" + log.info "It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs " + log.info "between flows with some parameters tuned for pediatric brain scans. " + log.info "" + log.info "Start time: $workflow.start" + log.info "" + + log.debug "[Command-line]" + log.debug "$workflow.commandLine" + log.debug "" + + log.info "[Git Info]" + log.info "$workflow.repository - $workflow.revision [$workflow.commitId]" + log.info "" + + log.info "[Inputs]" + log.info "Input: $params.input" + log.info "Output Directory: $params.output_dir" + log.info "" + + if ( params.run_tracking ) { + log.info "[Tracking Options]" + log.info "" + log.info "GLOBAL OPTIONS" + log.info "Threshold for b0: $params.b0_thr" + log.info "DWI Shell Tolerance: $params.dwi_shell_tolerance" + log.info "Skip DWI preprocessing: $params.skip_dwi_preprocessing" + log.info "" + log.info "BET DWI OPTIONS" + log.info "Initial fractional value for BET: $params.initial_bet_f" + log.info "Finale fractional value for BET: $params.final_bet_f" + log.info "" + if ( params.infant_config ) { + log.info "BET T2W OPTIONS" + log.info "Run BET on T2W image: $params.run_bet_anat" + log.info "Fractional value for T2W BET: $params.bet_anat_f" + } + else { + log.info "BET T1W OPTIONS" + log.info "T1 Tempalte: $params.template_t1" + } + log.info "" + log.info "EDDY AND TOPUP OPTIONS" + log.info "Configuration for topup: $params.topup_config" + log.info "Encoding direction: $params.encoding_direction" + log.info "Readout: $params.readout" + log.info "Topup prefix: $params.topup_prefix" + log.info "Topup BET fractional value: $params.topup_bet_f" + log.info "Eddy command: $params.eddy_cmd" + log.info "Run slice drop correction: $params.use_slice_drop_correction" + log.info "" + log.info "NORMALIZE OPTIONS" + log.info "FA threshold for masking: $params.fa_mask_threshold" + log.info "" + log.info "RESAMPLE ANAT OPTIONS" + log.info "Resampling resolution for Anatomical file: 
$params.anat_resolution" + log.info "Interpolation method for Anatomical file: $params.anat_interpolation" + log.info "Interpolation method for masks: $params.mask_interpolation" + log.info "" + log.info "RESAMPLE DWI OPTIONS" + log.info "Resampling resolution for DWI: $params.dwi_resolution" + log.info "Interpolation method for DWI: $params.dwi_interpolation" + log.info "Interpolation method for DWI mask: $params.mask_dwi_interpolation" + log.info "" + log.info "EXTRACT DWI SHELLS OPTIONS" + if ( params.dti_shells ) { + log.info "DTI Shells: $params.dti_shells" + } else { + log.info "Maximum DTI shell value: $params.max_dti_shell_value" + } + log.info "" + log.info "SH FITTING OPTIONS" + log.info "Run SH fitting: $params.sh_fitting" + log.info "SH fitting order: $params.sh_fitting_order" + log.info "SH fitting basis: $params.sh_fitting_basis" + log.info "" + log.info "FODF OPTIONS" + if ( params.fodf_shells ) { + log.info "FODF Shells: $params.fodf_shells" + } else { + log.info "Minimum fODF shell value: $params.min_fodf_shell_value" + } + log.info "FODF Metrics A factor: $params.fodf_metrics_a_factor" + log.info "Maximum FA value in ventricles: $params.max_fa_in_ventricle" + log.info "Minimum MD value in ventricles: $params.min_md_in_ventricle" + log.info "Relative threshold (RT): $params.relative_threshold" + log.info "SH basis: $params.basis" + log.info "SH order: $params.sh_order" + log.info "" + log.info "FRF OPTIONS" + log.info "Run mean FRF: $params.mean_frf" + log.info "FA threshold for single fiber voxel: $params.fa" + log.info "Minimum FA for selecting voxel: $params.min_fa" + log.info "Minimum number of voxels: $params.min_nvox" + log.info "ROI radius: $params.roi_radius" + log.info "Set FRF: $params.set_frf" + log.info "Manual FRF: $params.manual_frf" + log.info "" + log.info "SEGMENT TISSUES OPTIONS" + log.info "Number of tissues: $params.number_of_tissues" + log.info "" + log.info "SEEDING AND TRACKING OPTIONS" + if ( params.run_pft_tracking ) { + 
log.info "Algorithm for tracking: $params.pft_algo" + log.info "Number of seeds per voxel: $params.pft_nbr_seeds" + log.info "Seeding method: $params.pft_seeding" + log.info "Step size: $params.pft_step_size" + log.info "Theta threshold: $params.pft_theta" + log.info "Minimum fiber length: $params.pft_min_len" + log.info "Maximum fiber length: $params.pft_max_len" + log.info "Compression: $params.pft_compress_streamlines" + } + else { + log.info "Algorithm for tracking: $params.local_algo" + log.info "Number of seeds per voxel: $params.local_nbr_seeds" + log.info "Seeding method: $params.local_seeding" + log.info "Step size: $params.local_step_size" + log.info "Theta threshold: $params.local_theta" + log.info "Minimum fiber length: $params.local_min_len" + log.info "Maximum fiber length: $params.local_max_len" + log.info "Compression: $params.local_compress_streamlines" + } + log.info "" + log.info "PROCESSES PER TASKS" + if ( !params.infant_config ) { + log.info "Processes for denoising T1: $params.processes_denoise_t1" + log.info "Processes for BET T1: $params.processes_bet_t1" + } + log.info "Processes for denoising DWI: $params.processes_denoise_dwi" + log.info "Processes for EDDY: $params.processes_eddy" + log.info "Processes for registration: $params.processes_registration" + log.info "Processes for FODF: $params.processes_fodf" + log.info "" + } + + if ( params.run_freesurfer ) { + log.info "[Freesurfer Options]" + log.info "" + log.info "Atlas utils folder: $params.atlas_utils_folder" + log.info "Compute FS, BN, GL, SF: $params.compute_FS_BN_GL_SF" + log.info "Compute lobes: $params.compute_lobes" + log.info "Compute lausanne multiscale: $params.compute_lausanne_multiscale" + log.info "Number of threads: $params.nb_threads" + log.info "" + log.info "ATLAS SELECTION" + log.info "Use Freesurfer atlas: $params.use_freesurfer_atlas" + log.info "Use Brainnetome atlas: $params.use_brainnetome_atlas" + log.info "Use Glasser atlas: $params.use_glasser_atlas" + 
log.info "Use Schaefer 100 atlas: $params.use_schaefer_100_atlas" + log.info "Use Schaefer 200 atlas: $params.use_schaefer_200_atlas" + log.info "Use Schaefer 400 atlas: $params.use_schaefer_400_atlas" + log.info "Use Lausanne 1 atlas: $params.use_lausanne_1_atlas" + log.info "Use Lausanne 2 atlas: $params.use_lausanne_2_atlas" + log.info "Use Lausanne 3 atlas: $params.use_lausanne_3_atlas" + log.info "Use Lausanne 4 atlas: $params.use_lausanne_4_atlas" + log.info "Use Lausanne 5 atlas: $params.use_lausanne_5_atlas" + log.info "Use dilated labels: $params.use_dilated_labels" + log.info "" + } + + if ( params.run_connectomics ) { + log.info "[Connectomics Options]" + log.info "" + log.info "DECOMPOSE OPTIONS" + log.info "No pruning: $params.no_pruning" + log.info "No remove loops: $params.no_remove_loops" + log.info "No remove outliers: $params.no_remove_outliers" + log.info "Minimal length: $params.min_length" + log.info "Maximal length: $params.max_length" + log.info "Maximum looping angle: $params.loop_max_angle" + log.info "Outlier treshold: $params.outlier_threshold" + log.info "" + log.info "COMMIT OPTIONS" + log.info "Run COMMIT: $params.run_commit" + log.info "Use COMMIT2: $params.use_commit2" + log.info "COMMIT on trk: $params.commit_on_trk" + log.info "B-value threshold: $params.b_thr" + log.info "Number of directions: $params.nbr_dir" + log.info "Ball and stick: $params.ball_stick" + log.info "Parallel diffusivity: $params.para_diff" + log.info "Perpendicular diffusivity: $params.perp_diff" + log.info "Isotropic diffusivity: $params.iso_diff" + log.info "" + log.info "PROCESSES OPTIONS" + log.info "Number of processes for COMMIT: $params.processes_commit" + log.info "Number of processes for AFD_FIXEL: $params.processes_afd_fixel" + log.info "Number of processes for CONNECTIVITY: $params.processes_connectivity" + log.info "" + } } \ No newline at end of file diff --git a/modules/tracking/processes/DTI_processes.nf 
b/modules/tracking/processes/DTI_processes.nf index 3d8577b..9cebd4b 100644 --- a/modules/tracking/processes/DTI_processes.nf +++ b/modules/tracking/processes/DTI_processes.nf @@ -3,8 +3,12 @@ nextflow.enable.dsl=2 process EXTRACT_DTI_SHELL { - label "EXTRACT_DTI_SHELL" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DTI/DTI_Shells/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DTI/DTI_Shells/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -13,32 +17,36 @@ process EXTRACT_DTI_SHELL { path("${sid}__bvec_dti"), emit: dti_files script: if (params.dti_shells) - """ + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_extract_dwi_shell.py $dwi \ - $bval $bvec $params.dti_shells ${sid}__dwi_dti.nii.gz \ - ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f - """ + $bval $bvec $params.dti_shells ${sid}__dwi_dti.nii.gz \ + ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f + """ else - """ + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 shells=\$(awk -v max="$params.max_dti_shell_value" '{for (i = 1; i <= NF; i++) {v = int(\$i);if (v <= max) shells[v] = 1;}}END {for (v in shells) print v;}' "$bval" |\ - sort -n | tr '\n' ' ') + sort -n | tr '\n' ' ') scil_extract_dwi_shell.py $dwi \ - $bval $bvec \$shells ${sid}__dwi_dti.nii.gz \ - ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f - """ + $bval $bvec \$shells ${sid}__dwi_dti.nii.gz \ + ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f + """ } process DTI_METRICS { - label "DTI_METRICS" cpus 3 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/DTI/DTI_Metrics/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DTI/DTI_Metrics/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index fbd4329..390a7dd 100644 --- a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -3,8 +3,12 @@ nextflow.enable.dsl=2 process FODF_SHELL { - label "FODF" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/FODF/FODF_Shells/", mode: 'copy' + } else { + publishDir "${params.output_dir}/FODF/FODF_Shells/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -13,36 +17,40 @@ process FODF_SHELL { path("${sid}__bvec_fodf"), emit: dwi_fodf script: if (params.fodf_shells) - """ + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_extract_dwi_shell.py $dwi \ - $bval $bvec $params.fodf_shells ${sid}__dwi_fodf.nii.gz \ - ${sid}__bval_fodf ${sid}__bvec_fodf -t $params.dwi_shell_tolerance -f - """ + $bval $bvec $params.fodf_shells ${sid}__dwi_fodf.nii.gz \ + ${sid}__bval_fodf ${sid}__bvec_fodf -t $params.dwi_shell_tolerance -f + """ else - """ - export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 - export OMP_NUM_THREADS=1 - export OPENBLAS_NUM_THREADS=1 + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 - shells=\$(awk -v min_fodf="$params.min_fodf_shell_value" -v b0_thr="$params.b0_thr" '{for (i = 1; i <= NF; i++) - {v = int(\$i);if (v >= min_fodf || v <= b0_thr) shells[v] = 1;}} - END { - for (v in shells) print v; - } - ' "$bval" | sort -n | tr '\n' ' ') + shells=\$(awk -v min_fodf="$params.min_fodf_shell_value" -v b0_thr="$params.b0_thr" '{for (i = 1; i <= NF; i++) + {v = int(\$i);if (v >= min_fodf || v <= b0_thr) 
shells[v] = 1;}} + END { + for (v in shells) print v; + } + ' "$bval" | sort -n | tr '\n' ' ') - scil_extract_dwi_shell.py $dwi \ + scil_extract_dwi_shell.py $dwi \ $bval $bvec \$shells ${sid}__dwi_fodf.nii.gz \ ${sid}__bval_fodf ${sid}__bvec_fodf -t $params.dwi_shell_tolerance -f - """ + """ } process COMPUTE_FRF { - label "FRF" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/FRF/Compute_FRF/", mode: 'copy' + } else { + publishDir "${params.output_dir}/FRF/Compute_FRF/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -71,9 +79,12 @@ process COMPUTE_FRF { } process MEAN_FRF { - label "FRF" cpus 1 - publishDir = params.Mean_FRF_Publish_Dir + if ( ! params.symlink ) { + publishDir "${params.Mean_FRF_Publish_Dir}/", mode: 'copy' + } else { + publishDir "${params.Mean_FRF_Publish_Dir}/", mode: 'symlink' + } input: path(all_frf) @@ -89,8 +100,12 @@ process MEAN_FRF { } process FODF_METRICS { - label "FODF" cpus params.processes_fodf + if ( ! params.symlink ) { + publishDir "${params.output_dir}/FODF/FODF_Metrics/", mode: 'copy' + } else { + publishDir "${params.output_dir}/FODF/FODF_Metrics/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask), path(fa), path(md), path(frf) diff --git a/modules/tracking/processes/SH_processes.nf b/modules/tracking/processes/SH_processes.nf index 882af77..143fa42 100644 --- a/modules/tracking/processes/SH_processes.nf +++ b/modules/tracking/processes/SH_processes.nf @@ -3,8 +3,12 @@ nextflow.enable.dsl=2 process SH_FITTING_SHELL { - label "SH_FITTING" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/SH/SH_Shells/", mode: 'copy' + } else { + publishDir "${params.output_dir}/SH/SH_Shells/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -23,8 +27,12 @@ process SH_FITTING_SHELL { } process SH_FITTING { - label "SH_FITTING" cpus 1 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/SH/SH_Fitting/", mode: 'copy' + } else { + publishDir "${params.output_dir}/SH/SH_Fitting/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index bb5c194..707a212 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -3,8 +3,12 @@ nextflow.enable.dsl=2 process BET_DWI { - label "BET" cpus 2 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/Bet/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Bet/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -39,8 +43,12 @@ process BET_DWI { } process BET_T2 { - label "BET" cpus 2 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/ANAT/Bet/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/Bet/", mode: 'symlink' + } input: tuple val(sid), path(anat) @@ -58,8 +66,12 @@ process BET_T2 { } process DENOISING { - label "DENOISING" cpus params.processes_denoise_dwi + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/Denoising/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Denoising/", mode: 'symlink' + } input: tuple val(sid), path(dwi) @@ -79,8 +91,12 @@ process DENOISING { } process TOPUP { - label "TOPUP" cpus 4 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/Topup/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Topup/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(revb0) @@ -113,9 +129,13 @@ process TOPUP { } process EDDY_TOPUP { - label "EDDY_TOPUP" cpus params.processes_eddy memory { 5.GB * task.attempt } + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/DWI/Eddy_Topup/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Eddy_Topup/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0s_corrected), path(field), path(movpar) @@ -152,8 +172,12 @@ process EDDY_TOPUP { } process N4 { - label "N4" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/N4/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/N4/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -179,8 +203,12 @@ process N4 { } process CROP_DWI { - label "CROP_VOLUMES" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/Crop/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Crop/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(b0_mask) @@ -203,8 +231,12 @@ process CROP_DWI { } process DENOISE_T1 { - label "DENOISE_T1" cpus params.processes_denoise_t1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/ANAT/Denoising/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/Denoising/", mode: 'symlink' + } input: tuple val(sid), path(t1) @@ -224,8 +256,12 @@ process DENOISE_T1 { } process N4_T1 { - label "N4_T1" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/ANAT/N4/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/N4/", mode: 'symlink' + } input: tuple val(sid), path(t1) @@ -246,8 +282,12 @@ process N4_T1 { } process CROP_ANAT { - label "CROP_VOLUMES" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/ANAT/Crop/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/Crop/", mode: 'symlink' + } input: tuple val(sid), path(t2w), path(mask) @@ -268,8 +308,12 @@ process CROP_ANAT { } process RESAMPLE_T1 { - label "RESAMPLE_T1" cpus 1 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/ANAT/Resample/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/Resample/", mode: 'symlink' + } input: tuple val(sid), path(t1) @@ -290,8 +334,12 @@ process RESAMPLE_T1 { } process BET_T1 { - label "BET_T1" cpus params.processes_bet_t1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/ANAT/Bet/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/Bet/", mode: 'symlink' + } input: tuple val(sid), path(t1) @@ -317,8 +365,12 @@ process BET_T1 { } process RESAMPLE_ANAT { - label "RESAMPLE_VOLUMES" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/ANAT/Resample/", mode: 'copy' + } else { + publishDir "${params.output_dir}/ANAT/Resample/", mode: 'symlink' + } input: tuple val(sid), path(t2w), path(mask) @@ -343,8 +395,12 @@ process RESAMPLE_ANAT { } process NORMALIZE { - label "NORMALIZE_DWI" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/Normalize/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Normalize/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -387,8 +443,12 @@ process NORMALIZE { } process RESAMPLE_DWI { - label "RESAMPLE_DWI" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/Resample/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Resample/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(mask) @@ -412,8 +472,12 @@ process RESAMPLE_DWI { } process EXTRACT_B0 { - label "EXTRACT_B0" cpus 3 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/DWI/B0/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/B0/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -433,6 +497,11 @@ process EXTRACT_B0 { process DWI_MASK { cpus 1 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/DWI/Mask/", mode: 'copy' + } else { + publishDir "${params.output_dir}/DWI/Mask/", mode: 'symlink' + } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index d74941d..727cc9d 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -3,8 +3,12 @@ nextflow.enable.dsl=2 process REGISTER_T2 { - label "REGISTER_T2" cpus params.processes_registration + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'symlink' + } input: tuple val(sid), path(md), path(t2w), path(wm_mask) @@ -56,8 +60,12 @@ process REGISTER_T2 { } process REGISTER_T1 { - label "REGISTER_ANAT" cpus params.processes_registration + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'symlink' + } input: tuple val(sid), path(fa), path(t1), path(t1_mask), path(b0) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index cb082db..5eb3237 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -3,8 +3,12 @@ nextflow.enable.dsl=2 process SEGMENT_TISSUES { - label "SEGMENTATION" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Tracking/Segmentation/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Segmentation/", mode: 'symlink' + } input: tuple val(sid), path(anat) @@ -31,8 +35,12 @@ process SEGMENT_TISSUES { } process GENERATE_MASKS { - label "GENERATE_MASKS" cpus 1 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + } input: tuple val(sid), path(wm_mask), path(fa) @@ -56,8 +64,12 @@ process GENERATE_MASKS { } process LOCAL_TRACKING_MASK { - label "LOCAL_TRACKING" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + } input: tuple val(sid), path(wm), path(fa) @@ -82,8 +94,12 @@ process LOCAL_TRACKING_MASK { } process LOCAL_SEEDING_MASK { - label "LOCAL_TRACKING" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + } input: tuple val(sid), path(wm), path(fa) @@ -108,8 +124,12 @@ process LOCAL_SEEDING_MASK { } process LOCAL_TRACKING { - label "LOCAL_TRACKING" cpus 2 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'symlink' + } input: tuple val(sid), path(fodf), path(seeding_mask), path(tracking_mask) @@ -135,8 +155,12 @@ process LOCAL_TRACKING { } process PFT_SEEDING_MASK { - label "PFT_TRACKING" cpus 1 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + } input: tuple val(sid), path(wm), path(fa), path(interface_mask) @@ -169,8 +193,12 @@ process PFT_SEEDING_MASK { } process PFT_TRACKING_MASK { - label "PFT_TRACKING" cpus 1 + if ( ! 
params.symlink ) { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + } input: tuple val(sid), path(wm), path(gm), path(csf) @@ -193,8 +221,12 @@ process PFT_TRACKING_MASK { } process PFT_TRACKING { - label "PFT_TRACKING" cpus 2 + if ( ! params.symlink ) { + publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'copy' + } else { + publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'symlink' + } input: tuple val(sid), path(fodf), path(include), path(exclude), path(seed) diff --git a/nextflow.config b/nextflow.config index 473bf4c..98f248a 100644 --- a/nextflow.config +++ b/nextflow.config @@ -54,6 +54,7 @@ params { mask_dwi_interpolation = "nn" // EXTRACT_DTI_SHELLS Options + dti_shells = false max_dti_shell_value = 1200 // SH_FITTING_SHELL Options @@ -62,6 +63,7 @@ params { sh_fitting_basis = "descoteaux07" // FODF Options + fodf_shells = false min_fodf_shell_value = 700 fodf_metrics_a_factor = 2.0 max_fa_in_ventricle = 0.1 @@ -162,9 +164,6 @@ params { processes_connectivity = 4 params.commit_memory_limit = '6.GB' - // Output Directory - output_dir = false - // Profiles Options run_freesurfer = false run_tracking = false @@ -196,10 +195,13 @@ params { compute_FS_BN_GL_SF = true compute_lausanne_multiscale = true compute_lobes = false + + // ** Output Options ** // + output_dir = "./Results_ChildBrainFlow/" + symlink = true } -if(params.output_dir) { - process.publishDir = {"$params.output_dir/$sid/$task.process"} +if ( params.output_dir ) { params.Mean_FRF_Publish_Dir = "${params.output_dir}/Mean_FRF" params.Pop_Avg_Publish_Dir = "${params.output_dir}/Pop_Avg" } @@ -221,9 +223,7 @@ singularity.autoMounts = true profiles { no_symlink { - process{ - publishDir = [path: {"./Results_ChildBrainFlow/$sid/$task.process"}, mode: 'copy'] - } + symlink = false } macos { From 65dd8e27465aa9708414c69e7dab02be9198cd32 Mon Sep 17 00:00:00 2001 From: gagnonanthony 
<79757265+gagnonanthony@users.noreply.github.com> Date: Fri, 3 Nov 2023 10:07:47 -0400 Subject: [PATCH 33/54] fix lausanne atlas --- modules/freesurfer/processes/atlases.nf | 6 ++- .../freesurfer/workflows/freesurferflow.nf | 42 ++++++++++--------- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index c77352d..7819f7f 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -95,7 +95,11 @@ process LAUSANNE { each scale output: - tuple val(sid), path("lausanne_2008_scale_${scale}*.nii.gz"), emit: lausanne_${scale} + tuple val(sid), path("lausanne_2008_scale_1*.nii.gz"), emit: lausanne_1, optional: true + tuple val(sid), path("lausanne_2008_scale_2*.nii.gz"), emit: lausanne_2, optional: true + tuple val(sid), path("lausanne_2008_scale_3*.nii.gz"), emit: lausanne_3, optional: true + tuple val(sid), path("lausanne_2008_scale_4*.nii.gz"), emit: lausanne_4, optional: true + tuple val(sid), path("lausanne_2008_scale_5*.nii.gz"), emit: lausanne_5, optional: true path("*.txt") path("*.json") diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index 4780e27..6aadda8 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -31,6 +31,18 @@ workflow FREESURFERFLOW { LAUSANNE(FREESURFER.out.folders, scales) + // ** Reorganizing Lausanne multiscale atlas channel ** // + lausanne1 = LAUSANNE.out.lausanne_1.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_1.map{ [it[1]] }.flatMap()) + lausanne2 = LAUSANNE.out.lausanne_2.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_2.map{ [it[1]] }.flatMap()) + lausanne3 = LAUSANNE.out.lausanne_3.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_3.map{ [it[1]] }.flatMap()) + lausanne4 = LAUSANNE.out.lausanne_4.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_4.map{ [it[1]] }.flatMap()) + lausanne5 = 
LAUSANNE.out.lausanne_5.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_5.map{ [it[1]] }.flatMap()) + // ** Work out a way for the user to select which atlas to use. ** // // ** Could be cleaner than a bunch of if statements in the future. ** // if ( params.use_freesurfer_atlas ) { @@ -83,43 +95,33 @@ workflow FREESURFERFLOW { } } else if ( params.use_lausanne_1_atlas ) { if ( params.use_dilated_labels ) { - labels = LAUSANNE.out.lausanne_1 - .map{ [it[0], it[2]] } + labels = lausanne1.map{ [it[0], it[2]] } } else { - labels = LAUSANNE.out.lausanne_1 - .map{ [it[0], it[1]] } + labels = lausanne1.map{ [it[0], it[1]] } } } else if ( params.use_lausanne_2_atlas ) { if ( params.use_dilated_labels ) { - labels = LAUSANNE.out.lausanne_2 - .map{ [it[0], it[2]] } + labels = lausanne2.map{ [it[0], it[2]] } } else { - labels = LAUSANNE.out.lausanne_2 - .map{ [it[0], it[1]] } + labels = lausanne2.map{ [it[0], it[1]] } } } else if ( params.use_lausanne_3_atlas ) { if ( params.use_dilated_labels ) { - labels = LAUSANNE.out.lausanne_3 - .map{ [it[0], it[2]] } + labels = lausanne3.map{ [it[0], it[2]] } } else { - labels = LAUSANNE.out.lausanne_3 - .map{ [it[0], it[1]] } + labels = lausanne3.map{ [it[0], it[1]] } } } else if ( params.use_lausanne_4_atlas ) { if ( params.use_dilated_labels ) { - labels = LAUSANNE.out.lausanne_4 - .map{ [it[0], it[2]] } + labels = lausanne4.map{ [it[0], it[2]] } } else { - labels = LAUSANNE.out.lausanne_4 - .map{ [it[0], it[1]] } + labels = lausanne4.map{ [it[0], it[1]] } } } else if ( params.use_lausanne_5_atlas ) { if ( params.use_dilated_labels ) { - labels = LAUSANNE.out.lausanne_5 - .map{ [it[0], it[2]] } + labels = lausanne5.map{ [it[0], it[2]] } } else { - labels = LAUSANNE.out.lausanne_5 - .map{ [it[0], it[1]] } + labels = lausanne5.map{ [it[0], it[1]] } } } From a3dccf5b3a91ea53f1a215529bf22f389f33df7a Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 6 Nov 2023 09:45:10 -0500 Subject: 
[PATCH 34/54] fix output structure --- modules/connectomics/processes/commit.nf | 8 +-- .../connectomics/processes/compute_metrics.nf | 8 +-- modules/connectomics/processes/decompose.nf | 4 +- modules/connectomics/processes/transform.nf | 8 +-- modules/connectomics/processes/viz.nf | 4 +- modules/freesurfer/processes/atlases.nf | 12 ++-- modules/freesurfer/processes/freesurfer.nf | 4 +- modules/tracking/processes/DTI_processes.nf | 8 +-- modules/tracking/processes/FODF_processes.nf | 12 ++-- modules/tracking/processes/SH_processes.nf | 8 +-- modules/tracking/processes/preprocess.nf | 68 +++++++++---------- .../processes/registration_processes.nf | 8 +-- .../tracking/processes/tracking_processes.nf | 32 ++++----- 13 files changed, 92 insertions(+), 92 deletions(-) diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index e994009..27bc852 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -6,9 +6,9 @@ process COMMIT { cpus params.processes_commit memory params.commit_memory_limit if ( ! params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'symlink' } input: @@ -50,9 +50,9 @@ process COMMIT_ON_TRK { cpus params.processes_commit memory params.commit_memory_limit if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Commit/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'symlink' } input: diff --git a/modules/connectomics/processes/compute_metrics.nf b/modules/connectomics/processes/compute_metrics.nf index 24636d5..6824b7c 100644 --- a/modules/connectomics/processes/compute_metrics.nf +++ b/modules/connectomics/processes/compute_metrics.nf @@ -6,9 +6,9 @@ process COMPUTE_AFD_FIXEL { cpus params.processes_afd_fixel memory '2 GB' if ( ! params.symlink ) { - publishDir "${params.output_dir}/Connectomics/AFD_Fixel/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/AFD_Fixel/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/AFD_Fixel/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/AFD_Fixel/", mode: 'symlink' } input: @@ -26,9 +26,9 @@ process COMPUTE_CONNECTIVITY { cpus params.processes_connectivity memory '2 GB' if ( ! params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Connectivity_Metrics/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Connectivity_Metrics/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Connectivity_Metrics/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Connectivity_Metrics/", mode: 'symlink' } input: diff --git a/modules/connectomics/processes/decompose.nf b/modules/connectomics/processes/decompose.nf index 06c17a6..f566c17 100644 --- a/modules/connectomics/processes/decompose.nf +++ b/modules/connectomics/processes/decompose.nf @@ -6,9 +6,9 @@ process DECOMPOSE_CONNECTIVITY { cpus 1 memory { 7.B * trk.size() } if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Decompose/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Decompose/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Decompose/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Decompose/", mode: 'symlink' } input: diff --git a/modules/connectomics/processes/transform.nf b/modules/connectomics/processes/transform.nf index 5420f81..3e6d1a1 100644 --- a/modules/connectomics/processes/transform.nf +++ b/modules/connectomics/processes/transform.nf @@ -6,9 +6,9 @@ process TRANSFORM_LABELS { cpus 1 memory '2 GB' if ( ! params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Transform_Labels/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Transform_Labels/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Transform_Labels/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Transform_Labels/", mode: 'symlink' } input: @@ -28,9 +28,9 @@ process TRANSFORM_T1 { cpus 1 memory '2 GB' if ( ! params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Transform_T1/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Transform_T1/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Transform_T1/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Transform_T1/", mode: 'symlink' } input: diff --git a/modules/connectomics/processes/viz.nf b/modules/connectomics/processes/viz.nf index bacc907..6421153 100644 --- a/modules/connectomics/processes/viz.nf +++ b/modules/connectomics/processes/viz.nf @@ -6,9 +6,9 @@ process VISUALIZE_CONNECTIVITY { cpus 1 memory "2 GB" if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Connectomics/Visualize_Connectivity/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Connectomics/Visualize_Connectivity/", mode: 'copy' } else { - publishDir "${params.output_dir}/Connectomics/Visualize_Connectivity/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Connectomics/Visualize_Connectivity/", mode: 'symlink' } input: diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index 7819f7f..82afc7c 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process FS_BN_GL_SF { cpus params.nb_threads if ( ! params.symlink ) { - publishDir "${params.output_dir}/Freesurfer/FS_BN_GL_SF/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Freesurfer/FS_BN_GL_SF/", mode: 'copy' } else { - publishDir "${params.output_dir}/Freesurfer/FS_BN_GL_SF/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Freesurfer/FS_BN_GL_SF/", mode: 'symlink' } input: @@ -38,9 +38,9 @@ process FS_BN_GL_SF { process LOBES { cpus params.nb_threads if ( ! params.symlink ) { - publishDir "${params.output_dir}/Freesurfer/Lobes/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Freesurfer/Lobes/", mode: 'copy' } else { - publishDir "${params.output_dir}/Freesurfer/Lobes/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Freesurfer/Lobes/", mode: 'symlink' } input: @@ -85,9 +85,9 @@ process LOBES { process LAUSANNE { cpus 1 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Freesurfer/Lausanne/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Freesurfer/Lausanne/", mode: 'copy' } else { - publishDir "${params.output_dir}/Freesurfer/Lausanne/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Freesurfer/Lausanne/", mode: 'symlink' } input: diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index 81e1904..9424718 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process FREESURFER { cpus params.nb_threads if ( ! params.symlink ) { - publishDir "${params.output_dir}/Freesurfer/Freesurfer/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Freesurfer/Freesurfer/", mode: 'copy' } else { - publishDir "${params.output_dir}/Freesurfer/Freesurfer/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Freesurfer/Freesurfer/", mode: 'symlink' } input: diff --git a/modules/tracking/processes/DTI_processes.nf b/modules/tracking/processes/DTI_processes.nf index 9cebd4b..f7be5a3 100644 --- a/modules/tracking/processes/DTI_processes.nf +++ b/modules/tracking/processes/DTI_processes.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process EXTRACT_DTI_SHELL { cpus 3 if ( ! params.symlink ) { - publishDir "${params.output_dir}/DTI/DTI_Shells/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DTI/DTI_Shells/", mode: 'copy' } else { - publishDir "${params.output_dir}/DTI/DTI_Shells/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DTI/DTI_Shells/", mode: 'symlink' } input: @@ -43,9 +43,9 @@ process EXTRACT_DTI_SHELL { process DTI_METRICS { cpus 3 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/DTI/DTI_Metrics/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DTI/DTI_Metrics/", mode: 'copy' } else { - publishDir "${params.output_dir}/DTI/DTI_Metrics/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DTI/DTI_Metrics/", mode: 'symlink' } input: diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index 390a7dd..130d2a8 100644 --- a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process FODF_SHELL { cpus 3 if ( ! params.symlink ) { - publishDir "${params.output_dir}/FODF/FODF_Shells/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/FODF/FODF_Shells/", mode: 'copy' } else { - publishDir "${params.output_dir}/FODF/FODF_Shells/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/FODF/FODF_Shells/", mode: 'symlink' } input: @@ -47,9 +47,9 @@ process FODF_SHELL { process COMPUTE_FRF { cpus 3 if ( ! params.symlink ) { - publishDir "${params.output_dir}/FRF/Compute_FRF/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/FRF/Compute_FRF/", mode: 'copy' } else { - publishDir "${params.output_dir}/FRF/Compute_FRF/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/FRF/Compute_FRF/", mode: 'symlink' } input: @@ -102,9 +102,9 @@ process MEAN_FRF { process FODF_METRICS { cpus params.processes_fodf if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/FODF/FODF_Metrics/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/FODF/FODF_Metrics/", mode: 'copy' } else { - publishDir "${params.output_dir}/FODF/FODF_Metrics/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/FODF/FODF_Metrics/", mode: 'symlink' } input: diff --git a/modules/tracking/processes/SH_processes.nf b/modules/tracking/processes/SH_processes.nf index 143fa42..23427fe 100644 --- a/modules/tracking/processes/SH_processes.nf +++ b/modules/tracking/processes/SH_processes.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process SH_FITTING_SHELL { cpus 3 if ( ! params.symlink ) { - publishDir "${params.output_dir}/SH/SH_Shells/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/SH/SH_Shells/", mode: 'copy' } else { - publishDir "${params.output_dir}/SH/SH_Shells/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/SH/SH_Shells/", mode: 'symlink' } input: @@ -29,9 +29,9 @@ process SH_FITTING_SHELL { process SH_FITTING { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/SH/SH_Fitting/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/SH/SH_Fitting/", mode: 'copy' } else { - publishDir "${params.output_dir}/SH/SH_Fitting/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/SH/SH_Fitting/", mode: 'symlink' } input: diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 707a212..d2a0aa7 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process BET_DWI { cpus 2 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/DWI/Bet/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Bet/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Bet/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Bet/", mode: 'symlink' } input: @@ -45,9 +45,9 @@ process BET_DWI { process BET_T2 { cpus 2 if ( ! params.symlink ) { - publishDir "${params.output_dir}/ANAT/Bet/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/Bet/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'symlink' } input: @@ -68,9 +68,9 @@ process BET_T2 { process DENOISING { cpus params.processes_denoise_dwi if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Denoising/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Denoising/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Denoising/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Denoising/", mode: 'symlink' } input: @@ -93,9 +93,9 @@ process DENOISING { process TOPUP { cpus 4 if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Topup/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Topup/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Topup/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Topup/", mode: 'symlink' } input: @@ -132,9 +132,9 @@ process EDDY_TOPUP { cpus params.processes_eddy memory { 5.GB * task.attempt } if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Eddy_Topup/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Eddy_Topup/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Eddy_Topup/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Eddy_Topup/", mode: 'symlink' } input: @@ -174,9 +174,9 @@ process EDDY_TOPUP { process N4 { cpus 1 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/DWI/N4/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/N4/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/N4/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/N4/", mode: 'symlink' } input: @@ -205,9 +205,9 @@ process N4 { process CROP_DWI { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Crop/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Crop/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Crop/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Crop/", mode: 'symlink' } input: @@ -233,9 +233,9 @@ process CROP_DWI { process DENOISE_T1 { cpus params.processes_denoise_t1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/ANAT/Denoising/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/Denoising/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/Denoising/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/Denoising/", mode: 'symlink' } input: @@ -258,9 +258,9 @@ process DENOISE_T1 { process N4_T1 { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/ANAT/N4/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/N4/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/N4/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/N4/", mode: 'symlink' } input: @@ -284,9 +284,9 @@ process N4_T1 { process CROP_ANAT { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/ANAT/Crop/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/Crop/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/Crop/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/Crop/", mode: 'symlink' } input: @@ -310,9 +310,9 @@ process CROP_ANAT { process RESAMPLE_T1 { cpus 1 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/ANAT/Resample/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/Resample/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'symlink' } input: @@ -336,9 +336,9 @@ process RESAMPLE_T1 { process BET_T1 { cpus params.processes_bet_t1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/ANAT/Bet/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/Bet/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'symlink' } input: @@ -367,9 +367,9 @@ process BET_T1 { process RESAMPLE_ANAT { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/ANAT/Resample/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'copy' } else { - publishDir "${params.output_dir}/ANAT/Resample/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'symlink' } input: @@ -397,9 +397,9 @@ process RESAMPLE_ANAT { process NORMALIZE { cpus 3 if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Normalize/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Normalize/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Normalize/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Normalize/", mode: 'symlink' } input: @@ -445,9 +445,9 @@ process NORMALIZE { process RESAMPLE_DWI { cpus 3 if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Resample/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Resample/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Resample/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Resample/", mode: 'symlink' } input: @@ -474,9 +474,9 @@ process RESAMPLE_DWI { process EXTRACT_B0 { cpus 3 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/DWI/B0/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/B0/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/B0/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/B0/", mode: 'symlink' } input: @@ -498,9 +498,9 @@ process EXTRACT_B0 { process DWI_MASK { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/DWI/Mask/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/DWI/Mask/", mode: 'copy' } else { - publishDir "${params.output_dir}/DWI/Mask/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/DWI/Mask/", mode: 'symlink' } input: diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index 727cc9d..59f9ff0 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process REGISTER_T2 { cpus params.processes_registration if ( ! params.symlink ) { - publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'copy' } else { - publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'symlink' } input: @@ -62,9 +62,9 @@ process REGISTER_T2 { process REGISTER_T1 { cpus params.processes_registration if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'copy' } else { - publishDir "${params.output_dir}/Registration/Register_Anat/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'symlink' } input: diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 5eb3237..10957f3 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -5,9 +5,9 @@ nextflow.enable.dsl=2 process SEGMENT_TISSUES { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/Tracking/Segmentation/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Segmentation/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Segmentation/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Segmentation/", mode: 'symlink' } input: @@ -37,9 +37,9 @@ process SEGMENT_TISSUES { process GENERATE_MASKS { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' } input: @@ -66,9 +66,9 @@ process GENERATE_MASKS { process LOCAL_TRACKING_MASK { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' } input: @@ -96,9 +96,9 @@ process LOCAL_TRACKING_MASK { process LOCAL_SEEDING_MASK { cpus 1 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' } input: @@ -126,9 +126,9 @@ process LOCAL_SEEDING_MASK { process LOCAL_TRACKING { cpus 2 if ( ! params.symlink ) { - publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'symlink' } input: @@ -157,9 +157,9 @@ process LOCAL_TRACKING { process PFT_SEEDING_MASK { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' } input: @@ -195,9 +195,9 @@ process PFT_SEEDING_MASK { process PFT_TRACKING_MASK { cpus 1 if ( ! params.symlink ) { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Masks/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' } input: @@ -223,9 +223,9 @@ process PFT_TRACKING_MASK { process PFT_TRACKING { cpus 2 if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'copy' + publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'copy' } else { - publishDir "${params.output_dir}/Tracking/Tracking/", mode: 'symlink' + publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'symlink' } input: From 297bbb90abf0cb9019fb2e024ce8d0e2fa3d0f87 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 6 Nov 2023 10:09:30 -0500 Subject: [PATCH 35/54] reverting back to $task.process for output --- modules/connectomics/processes/commit.nf | 10 --- .../connectomics/processes/compute_metrics.nf | 10 --- modules/connectomics/processes/decompose.nf | 5 -- modules/connectomics/processes/transform.nf | 10 --- modules/connectomics/processes/viz.nf | 5 -- modules/freesurfer/processes/atlases.nf | 15 ---- modules/freesurfer/processes/freesurfer.nf | 5 -- modules/tracking/processes/DTI_processes.nf | 10 --- modules/tracking/processes/FODF_processes.nf | 20 ----- modules/tracking/processes/SH_processes.nf | 10 --- modules/tracking/processes/preprocess.nf | 85 ------------------- .../processes/registration_processes.nf | 10 --- .../tracking/processes/tracking_processes.nf | 40 --------- nextflow.config | 7 +- 14 files changed, 4 insertions(+), 238 deletions(-) diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 27bc852..a82b1ae 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -5,11 +5,6 @@ nextflow.enable.dsl=2 process COMMIT { cpus params.processes_commit memory params.commit_memory_limit - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'symlink' - } input: tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks) @@ -49,11 +44,6 @@ process COMMIT { process COMMIT_ON_TRK { cpus params.processes_commit memory params.commit_memory_limit - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Commit/", mode: 'symlink' - } input: tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) diff --git a/modules/connectomics/processes/compute_metrics.nf b/modules/connectomics/processes/compute_metrics.nf index 6824b7c..a9b72b5 100644 --- a/modules/connectomics/processes/compute_metrics.nf +++ b/modules/connectomics/processes/compute_metrics.nf @@ -5,11 +5,6 @@ nextflow.enable.dsl=2 process COMPUTE_AFD_FIXEL { cpus params.processes_afd_fixel memory '2 GB' - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/AFD_Fixel/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/AFD_Fixel/", mode: 'symlink' - } input: tuple val(sid), path(h5), path(fodf) @@ -25,11 +20,6 @@ process COMPUTE_AFD_FIXEL { process COMPUTE_CONNECTIVITY { cpus params.processes_connectivity memory '2 GB' - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Connectivity_Metrics/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Connectivity_Metrics/", mode: 'symlink' - } input: tuple val(sid), path(h5), path(labels), path(metrics) diff --git a/modules/connectomics/processes/decompose.nf b/modules/connectomics/processes/decompose.nf index f566c17..1a20dc2 100644 --- a/modules/connectomics/processes/decompose.nf +++ b/modules/connectomics/processes/decompose.nf @@ -5,11 +5,6 @@ nextflow.enable.dsl=2 process DECOMPOSE_CONNECTIVITY { cpus 1 memory { 7.B * trk.size() } - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Decompose/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Decompose/", mode: 'symlink' - } input: tuple val(sid), path(trk), path(labels) diff --git a/modules/connectomics/processes/transform.nf b/modules/connectomics/processes/transform.nf index 3e6d1a1..d4d086c 100644 --- a/modules/connectomics/processes/transform.nf +++ b/modules/connectomics/processes/transform.nf @@ -5,11 +5,6 @@ nextflow.enable.dsl=2 process TRANSFORM_LABELS { cpus 1 memory '2 GB' - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Transform_Labels/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Transform_Labels/", mode: 'symlink' - } input: tuple val(sid), path(labels), path(t2), path(mat), path(syn) @@ -27,11 +22,6 @@ process TRANSFORM_LABELS { process TRANSFORM_T1 { cpus 1 memory '2 GB' - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Transform_T1/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Transform_T1/", mode: 'symlink' - } input: tuple val(sid), path(t1), path(dwi), path(bval), path(bvec), path(mat), path(syn) diff --git a/modules/connectomics/processes/viz.nf b/modules/connectomics/processes/viz.nf index 6421153..3fd6a92 100644 --- a/modules/connectomics/processes/viz.nf +++ b/modules/connectomics/processes/viz.nf @@ -5,11 +5,6 @@ nextflow.enable.dsl=2 process VISUALIZE_CONNECTIVITY { cpus 1 memory "2 GB" - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Connectomics/Visualize_Connectivity/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Connectomics/Visualize_Connectivity/", mode: 'symlink' - } input: tuple val(sid), path(npy) diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index 82afc7c..a10a8ae 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process FS_BN_GL_SF { cpus params.nb_threads - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Freesurfer/FS_BN_GL_SF/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Freesurfer/FS_BN_GL_SF/", mode: 'symlink' - } input: tuple val(sid), path(folder) @@ -37,11 +32,6 @@ process FS_BN_GL_SF { process LOBES { cpus params.nb_threads - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Freesurfer/Lobes/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Freesurfer/Lobes/", mode: 'symlink' - } input: tuple val(sid), path(folder) @@ -84,11 +74,6 @@ process LOBES { process LAUSANNE { cpus 1 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/Freesurfer/Lausanne/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Freesurfer/Lausanne/", mode: 'symlink' - } input: tuple val(sid), path(folder) diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index 9424718..5700e3f 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process FREESURFER { cpus params.nb_threads - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Freesurfer/Freesurfer/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Freesurfer/Freesurfer/", mode: 'symlink' - } input: tuple val(sid), path(anat) diff --git a/modules/tracking/processes/DTI_processes.nf b/modules/tracking/processes/DTI_processes.nf index f7be5a3..ffb9e43 100644 --- a/modules/tracking/processes/DTI_processes.nf +++ b/modules/tracking/processes/DTI_processes.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process EXTRACT_DTI_SHELL { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DTI/DTI_Shells/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DTI/DTI_Shells/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -42,11 +37,6 @@ process EXTRACT_DTI_SHELL { process DTI_METRICS { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DTI/DTI_Metrics/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DTI/DTI_Metrics/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index 130d2a8..404118c 100644 --- a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process FODF_SHELL { cpus 3 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/FODF/FODF_Shells/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/FODF/FODF_Shells/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -46,11 +41,6 @@ process FODF_SHELL { process COMPUTE_FRF { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/FRF/Compute_FRF/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/FRF/Compute_FRF/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -80,11 +70,6 @@ process COMPUTE_FRF { process MEAN_FRF { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.Mean_FRF_Publish_Dir}/", mode: 'copy' - } else { - publishDir "${params.Mean_FRF_Publish_Dir}/", mode: 'symlink' - } input: path(all_frf) @@ -101,11 +86,6 @@ process MEAN_FRF { process FODF_METRICS { cpus params.processes_fodf - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/FODF/FODF_Metrics/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/FODF/FODF_Metrics/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask), path(fa), path(md), path(frf) diff --git a/modules/tracking/processes/SH_processes.nf b/modules/tracking/processes/SH_processes.nf index 23427fe..9c7d5aa 100644 --- a/modules/tracking/processes/SH_processes.nf +++ b/modules/tracking/processes/SH_processes.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process SH_FITTING_SHELL { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/SH/SH_Shells/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/SH/SH_Shells/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -28,11 +23,6 @@ process SH_FITTING_SHELL { process SH_FITTING { cpus 1 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/SH/SH_Fitting/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/SH/SH_Fitting/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index d2a0aa7..b630e75 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process BET_DWI { cpus 2 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Bet/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Bet/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -44,11 +39,6 @@ process BET_DWI { process BET_T2 { cpus 2 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'symlink' - } input: tuple val(sid), path(anat) @@ -67,11 +57,6 @@ process BET_T2 { process DENOISING { cpus params.processes_denoise_dwi - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Denoising/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Denoising/", mode: 'symlink' - } input: tuple val(sid), path(dwi) @@ -92,11 +77,6 @@ process DENOISING { process TOPUP { cpus 4 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Topup/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Topup/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(revb0) @@ -131,11 +111,6 @@ process TOPUP { process EDDY_TOPUP { cpus params.processes_eddy memory { 5.GB * task.attempt } - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Eddy_Topup/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Eddy_Topup/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0s_corrected), path(field), path(movpar) @@ -173,11 +148,6 @@ process EDDY_TOPUP { process N4 { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/N4/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/N4/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -204,11 +174,6 @@ process N4 { process CROP_DWI { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Crop/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Crop/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(b0_mask) @@ -232,11 +197,6 @@ process CROP_DWI { process DENOISE_T1 { cpus params.processes_denoise_t1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/Denoising/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/Denoising/", mode: 'symlink' - } input: tuple val(sid), path(t1) @@ -257,11 +217,6 @@ process DENOISE_T1 { process N4_T1 { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/N4/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/N4/", mode: 'symlink' - } input: tuple val(sid), path(t1) @@ -283,11 +238,6 @@ process N4_T1 { process CROP_ANAT { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/Crop/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/Crop/", mode: 'symlink' - } input: tuple val(sid), path(t2w), path(mask) @@ -309,11 +259,6 @@ process CROP_ANAT { process RESAMPLE_T1 { cpus 1 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'symlink' - } input: tuple val(sid), path(t1) @@ -335,11 +280,6 @@ process RESAMPLE_T1 { process BET_T1 { cpus params.processes_bet_t1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/Bet/", mode: 'symlink' - } input: tuple val(sid), path(t1) @@ -366,11 +306,6 @@ process BET_T1 { process RESAMPLE_ANAT { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/ANAT/Resample/", mode: 'symlink' - } input: tuple val(sid), path(t2w), path(mask) @@ -396,11 +331,6 @@ process RESAMPLE_ANAT { process NORMALIZE { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Normalize/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Normalize/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -444,11 +374,6 @@ process NORMALIZE { process RESAMPLE_DWI { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Resample/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Resample/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(mask) @@ -473,11 +398,6 @@ process RESAMPLE_DWI { process EXTRACT_B0 { cpus 3 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/B0/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/B0/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -497,11 +417,6 @@ process EXTRACT_B0 { process DWI_MASK { cpus 1 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/DWI/Mask/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/DWI/Mask/", mode: 'symlink' - } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index 59f9ff0..c445dec 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process REGISTER_T2 { cpus params.processes_registration - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'symlink' - } input: tuple val(sid), path(md), path(t2w), path(wm_mask) @@ -61,11 +56,6 @@ process REGISTER_T2 { process REGISTER_T1 { cpus params.processes_registration - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Registration/Register_Anat/", mode: 'symlink' - } input: tuple val(sid), path(fa), path(t1), path(t1_mask), path(b0) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 10957f3..ad4ef84 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -4,11 +4,6 @@ nextflow.enable.dsl=2 process SEGMENT_TISSUES { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Segmentation/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Segmentation/", mode: 'symlink' - } input: tuple val(sid), path(anat) @@ -36,11 +31,6 @@ process SEGMENT_TISSUES { process GENERATE_MASKS { cpus 1 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' - } input: tuple val(sid), path(wm_mask), path(fa) @@ -65,11 +55,6 @@ process GENERATE_MASKS { process LOCAL_TRACKING_MASK { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' - } input: tuple val(sid), path(wm), path(fa) @@ -95,11 +80,6 @@ process LOCAL_TRACKING_MASK { process LOCAL_SEEDING_MASK { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' - } input: tuple val(sid), path(wm), path(fa) @@ -125,11 +105,6 @@ process LOCAL_SEEDING_MASK { process LOCAL_TRACKING { cpus 2 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'symlink' - } input: tuple val(sid), path(fodf), path(seeding_mask), path(tracking_mask) @@ -156,11 +131,6 @@ process LOCAL_TRACKING { process PFT_SEEDING_MASK { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' - } input: tuple val(sid), path(wm), path(fa), path(interface_mask) @@ -194,11 +164,6 @@ process PFT_SEEDING_MASK { process PFT_TRACKING_MASK { cpus 1 - if ( ! params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Masks/", mode: 'symlink' - } input: tuple val(sid), path(wm), path(gm), path(csf) @@ -222,11 +187,6 @@ process PFT_TRACKING_MASK { process PFT_TRACKING { cpus 2 - if ( ! 
params.symlink ) { - publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'copy' - } else { - publishDir "${params.output_dir}/${sid}/Tracking/Tracking/", mode: 'symlink' - } input: tuple val(sid), path(fodf), path(include), path(exclude), path(seed) diff --git a/nextflow.config b/nextflow.config index 98f248a..d9d336f 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,5 +1,5 @@ process { - publishDir = {"./Results_ChildBrainFlow/$sid/$task.process"} + publishDir = {"./Results_ChildBrainFlow/$sid/$task.process.replaceAll(':', '-')"} scratch = true errorStrategy = { task.attempt <= 3 ? 'retry' : 'ignore' } maxRetries = 3 @@ -198,7 +198,6 @@ params { // ** Output Options ** // output_dir = "./Results_ChildBrainFlow/" - symlink = true } if ( params.output_dir ) { @@ -223,7 +222,9 @@ singularity.autoMounts = true profiles { no_symlink { - symlink = false + process{ + publishDir = [path: {"./Results_Infant_Tracking/$sid/$task.process.replaceAll(':', '-')"}, mode: 'copy'] + } } macos { From b99c5c87023e3f20f1bcdbf6326cb53085ef6185 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 6 Nov 2023 10:15:44 -0500 Subject: [PATCH 36/54] fix typo --- nextflow.config | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nextflow.config b/nextflow.config index d9d336f..dc1daae 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,5 +1,5 @@ process { - publishDir = {"./Results_ChildBrainFlow/$sid/$task.process.replaceAll(':', '-')"} + publishDir = {"./Results_ChildBrainFlow/$sid/${task.process.replaceAll(':', '-')}"} scratch = true errorStrategy = { task.attempt <= 3 ? 
'retry' : 'ignore' } maxRetries = 3 @@ -223,7 +223,7 @@ singularity.autoMounts = true profiles { no_symlink { process{ - publishDir = [path: {"./Results_Infant_Tracking/$sid/$task.process.replaceAll(':', '-')"}, mode: 'copy'] + publishDir = [path: {"./Results_Infant_Tracking/$sid/${task.process.replaceAll(':', '-')}"}, mode: 'copy'] } } From 4c014f0f47783d31384be38538ee79432c76e6d2 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 6 Nov 2023 10:25:28 -0500 Subject: [PATCH 37/54] explicitly outputting and selecting altases --- modules/freesurfer/processes/atlases.nf | 18 +++++++++----- .../freesurfer/workflows/freesurferflow.nf | 24 +++++-------------- 2 files changed, 18 insertions(+), 24 deletions(-) diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index a10a8ae..c70acaf 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -9,12 +9,18 @@ process FS_BN_GL_SF { tuple val(sid), path(folder) output: - tuple val(sid), path("*freesurfer*.nii.gz"), emit: freesurfer - tuple val(sid), path("*brainnetome*.nii.gz"), emit: brainnetome - tuple val(sid), path("*glasser*.nii.gz"), emit: glasser - tuple val(sid), path("*schaefer_100*.nii.gz"), emit: schaefer_100 - tuple val(sid), path("*schaefer_200*.nii.gz"), emit: schaefer_200 - tuple val(sid), path("*schaefer_400*.nii.gz"), emit: schaefer_400 + tuple val(sid), path("*freesurfer_v5.nii.gz"), emit: freesurfer + tuple val(sid), path("*freesurfer_v5_dilate.nii.gz"), emit: freesurfer_dilated + tuple val(sid), path("*brainnetome_v5.nii.gz"), emit: brainnetome + tuple val(sid), path("*brainnetome_v5_dilate.nii.gz"), emit: brainnetome_dilated + tuple val(sid), path("*glasser_v5.nii.gz"), emit: glasser + tuple val(sid), path("*glasser_v5_dilate.nii.gz"), emit: glasser_dilated + tuple val(sid), path("*schaefer_100_v5.nii.gz"), emit: schaefer_100 + tuple val(sid), 
path("*schaefer_100_v5_dilate.nii.gz"), emit: schaefer_100_dilated + tuple val(sid), path("*schaefer_200_v5.nii.gz"), emit: schaefer_200 + tuple val(sid), path("*schaefer_200_v5_dilate.nii.gz"), emit: schaefer_200_dilated + tuple val(sid), path("*schaefer_400_v5.nii.gz"), emit: schaefer_400 + tuple val(sid), path("*schaefer_400_v5_dilate.nii.gz"), emit: schaefer_400_dilated path("*[brainnetome,freesurfer,glasser,schaefer]*.txt") path("*[brainnetome,freesurfer,glasser,schaefer]*.json") diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index 6aadda8..8388da9 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -47,51 +47,39 @@ workflow FREESURFERFLOW { // ** Could be cleaner than a bunch of if statements in the future. ** // if ( params.use_freesurfer_atlas ) { if ( params.use_dilated_labels ) { - labels = FS_BN_GL_SF.out.freesurfer - .map{ [it[0], it[2]] } + labels = FS_BN_GL_SF.out.freesurfer_dilated } else { labels = FS_BN_GL_SF.out.freesurfer - .map{ [it[0], it[1]] } } } else if ( params.use_brainnetome_atlas ) { if ( params.use_dilated_labels ) { - labels = FS_BN_GL_SF.out.brainnetome - .map{ [it[0], it[2]] } + labels = FS_BN_GL_SF.out.brainnetome_dilated } else { labels = FS_BN_GL_SF.out.brainnetome - .map{ [it[0], it[1]] } } } else if ( params.use_glasser_atlas ) { if ( params.use_dilated_labels ) { - labels = FS_BN_GL_SF.out.glasser - .map{ [it[0], it[2]] } + labels = FS_BN_GL_SF.out.glasser_dilated } else { labels = FS_BN_GL_SF.out.glasser - .map{ [it[0], it[1]] } } } else if ( params.use_schaefer_100_atlas ) { if ( params.use_dilated_labels ) { - labels = FS_BN_GL_SF.out.schaefer_100 - .map{ [it[0], it[2]] } + labels = FS_BN_GL_SF.out.schaefer_100_dilated } else { labels = FS_BN_GL_SF.out.schaefer_100 - .map{ [it[0], it[1]] } } } else if ( params.use_schaefer_200_atlas ) { if ( params.use_dilated_labels ) { - labels = 
FS_BN_GL_SF.out.schaefer_200 - .map{ [it[0], it[2]] } + labels = FS_BN_GL_SF.out.schaefer_200_dilated } else { labels = FS_BN_GL_SF.out.schaefer_200 - .map{ [it[0], it[1]] } } } else if ( params.use_schaefer_400_atlas ) { if ( params.use_dilated_labels ) { - labels = FS_BN_GL_SF.out.schaefer_400 - .map{ [it[0], it[2]] } + labels = FS_BN_GL_SF.out.schaefer_400_dilated } else { labels = FS_BN_GL_SF.out.schaefer_400 - .map{ [it[0], it[1]] } } } else if ( params.use_lausanne_1_atlas ) { if ( params.use_dilated_labels ) { From 108204dfdcf39be32a41e44991f93720793eb064 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 14 Nov 2023 10:24:14 -0500 Subject: [PATCH 38/54] bumping scilus 1.5 -> 1.6 in container --- containers/Dockerfile | 6 ++++-- containers/apptainer_recipe.def | 8 +++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/containers/Dockerfile b/containers/Dockerfile index c63bb98..a870df3 100644 --- a/containers/Dockerfile +++ b/containers/Dockerfile @@ -1,4 +1,4 @@ -FROM scilus/scilus-flows:1.5.0 +FROM scilus/scilus:1.6.0 LABEL version="ChildBrainFlow-1.0.0" @@ -15,7 +15,9 @@ RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer RUN wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 RUN apt-get update && \ apt-get install csh tcsh && \ - apt-get install libglu1-mesa + apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev \ + echo "/usr/local/lib" >> /etc/ld.so.conf \ + ldconfig RUN wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 \ tar xjf parallel-latest.tar.bz2 \ cd parallel-* && ./configure && make && make install \ diff --git a/containers/apptainer_recipe.def b/containers/apptainer_recipe.def index 46cc309..370c9ad 100644 --- a/containers/apptainer_recipe.def +++ b/containers/apptainer_recipe.def @@ -1,5 +1,5 @@ Bootstrap: docker -From: 
scilus/scilus-flows:1.5.0 +From: scilus/scilus:1.6.0 %labels version ChildBrainFlow-1.0.0 @@ -16,8 +16,10 @@ From: scilus/scilus-flows:1.5.0 apt-get update apt-get install -y csh tcsh - # Install libGLU - apt-get install -y libglu1-mesa + # Install lib* + apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev + echo "/usr/local/lib" >> /etc/ld.so.conf + ldconfig # Download additional files wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 From ca753603387b39d441ff7ad9298c66f6b07e0fac Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Mon, 4 Dec 2023 21:57:44 -0500 Subject: [PATCH 39/54] add priors + change commit process --- main.nf | 15 ++++- modules/connectomics/USAGE | 2 + modules/connectomics/USAGE_ALL | 14 ++++- modules/connectomics/USAGE_INFANT | 10 ++++ modules/connectomics/USAGE_TRACKING | 12 ++++ modules/connectomics/USAGE_TRACKING_INFANT | 10 ++++ modules/connectomics/processes/commit.nf | 58 ++++++++++++++----- .../connectomics/workflows/connectomics.nf | 27 ++++++++- modules/tracking/processes/preprocess.nf | 10 ++-- .../tracking/processes/tracking_processes.nf | 24 ++++++++ nextflow.config | 7 +++ 11 files changed, 166 insertions(+), 23 deletions(-) diff --git a/main.nf b/main.nf index fdd7887..d488920 100644 --- a/main.nf +++ b/main.nf @@ -152,6 +152,10 @@ workflow { // ** Fetching transformation files ** // transfos = REGISTRATION.out.transfos + // ** Fetching FA, MD, AD for priors computation ** // + fa_md_ad_channel = DTI.out.fa_and_md + .combine(DTI.out.ad_and_rd.map{[ it[0], it[1] ]}, by: 0) + // ** Launching connectomics workflow ** // CONNECTOMICS(tracking, labels, @@ -159,7 +163,8 @@ workflow { fodf, metrics_flat, t2w, - transfos) + transfos, + fa_md_ad_channel) } if ( params.run_connectomics && !params.run_tracking ) { @@ -185,7 +190,8 @@ workflow { data.fodf, metrics, anat, - 
data.transfos) + data.transfos, + []) } } } @@ -318,8 +324,13 @@ def display_usage () { "max_length":"$params.max_length", "loop_max_angle":"$params.loop_max_angle", "outlier_threshold":"$params.outlier_threshold", + "compute_priors":"$params.compute_priors", + "fa_min_priors":"$params.fa_min_priors", + "fa_max_priors":"$params.fa_max_priors", + "md_min_priors":"$params.md_min_priors", "run_commit":"$params.run_commit", "use_commit2":"$params.use_commit2", + "use_both":"$params.use_both", "commit_on_trk":"$params.commit_on_trk", "b_thr":"$params.b_thr", "ball_stick":"$params.ball_stick", diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE index 23389d6..a95ce6a 100644 --- a/modules/connectomics/USAGE +++ b/modules/connectomics/USAGE @@ -67,6 +67,8 @@ DESCRIPTION --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. + --use_both If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + data. ($use_both) --b_thr Tolerance value to considier bvalues to be the same shell. 
--nbr_dir Number of directions, (half sphere), representing the possible orientations of the response functions ($nbr_dir) diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL index 3924ca3..baf0311 100644 --- a/modules/connectomics/USAGE_ALL +++ b/modules/connectomics/USAGE_ALL @@ -16,7 +16,7 @@ process have been taken: Run Tracking Pipeline -nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics,freesurfer DESCRIPTION @@ -240,10 +240,22 @@ OPTIONAL ARGUMENTS (current value) --outlier_threshold Outlier removal threshold when using hierarchical QB ($outlier_threshold) + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + COMMIT OPTIONS --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. + --use_both If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + data. ($use_both) --b_thr Tolerance value to considier bvalues to be the same shell. 
--nbr_dir Number of directions, (half sphere), representing the possible orientations of the response functions ($nbr_dir) diff --git a/modules/connectomics/USAGE_INFANT b/modules/connectomics/USAGE_INFANT index bc606ca..05908a4 100644 --- a/modules/connectomics/USAGE_INFANT +++ b/modules/connectomics/USAGE_INFANT @@ -63,6 +63,16 @@ DESCRIPTION --outlier_threshold Outlier removal threshold when using hierarchical QB ($outlier_threshold) + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + COMMIT OPTIONS --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) diff --git a/modules/connectomics/USAGE_TRACKING b/modules/connectomics/USAGE_TRACKING index d8bc40e..f7d1fad 100644 --- a/modules/connectomics/USAGE_TRACKING +++ b/modules/connectomics/USAGE_TRACKING @@ -205,10 +205,22 @@ OPTIONAL ARGUMENTS (current value) --outlier_threshold Outlier removal threshold when using hierarchical QB ($outlier_threshold) + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + COMMIT OPTIONS --run_commit If set, COMMIT will be run on the tractogram. 
($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. + --use_both If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + data. ($use_both) --b_thr Tolerance value to considier bvalues to be the same shell. --nbr_dir Number of directions, (half sphere), representing the possible orientations of the response functions ($nbr_dir) diff --git a/modules/connectomics/USAGE_TRACKING_INFANT b/modules/connectomics/USAGE_TRACKING_INFANT index f6a1799..925a059 100644 --- a/modules/connectomics/USAGE_TRACKING_INFANT +++ b/modules/connectomics/USAGE_TRACKING_INFANT @@ -204,6 +204,16 @@ OPTIONAL ARGUMENTS (current value) --outlier_threshold Outlier removal threshold when using hierarchical QB ($outlier_threshold) + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + COMMIT OPTIONS --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. 
($use_commit2) diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index a82b1ae..e0e7233 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -7,35 +7,46 @@ process COMMIT { memory params.commit_memory_limit input: - tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks) + tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks), path(para_diff), path(iso_diff) output: - tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit - tuple val(sid), path("${sid}__results_bzs/") + tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit, optional: true + tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit, optional: true + tuple val(sid), path("${sid}__results_bzs/"), optional: true + tuple val(sid), path("${sid}__results_bzs_1/"), optional: true + tuple val(sid), path("${sid}__results_bzs_2/"), optional: true when: params.run_commit script: - ball_stick_arg="" - perp_diff_arg="" - if ( params.ball_stick ) { - ball_stick_arg="--ball_stick" - } - else { - perp_diff_arg="--perp_diff $params.perp_diff" - } + def para_diff_arg = para_diff ? "--para_diff \$(cat $para_diff)" : "--para_diff $params.para_diff" + def iso_diff_arg = iso_diff ? "--iso_diff \$(cat $iso_diff)" : "--iso_diff $params.iso_diff" + def perp_diff_arg = ball_stick_arg ? "" : "--perp_diff $params.perp_diff" + def ball_stick_arg = ball_stick_arg ? 
"--ball_stick" : "" + if ( params.use_commit2 ) { """ - scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 --in_peaks $peaks\ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ - --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff + $para_diff_arg $iso_diff_arg mv "${sid}__results_bzs/commit_2/decompose_commit.h5" "./${sid}__decompose_commit.h5" """ } + else if ( params.use_both ) { + """ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs_1/" --ball_stick --commit2 \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ + $para_diff_arg $iso_diff_arg + scil_run_commit.py ${sid}__results_bzs_1/commit_2/essential_tractogram.trk $dwi $bval $bvec "${sid}__results_bzs_2/"\ + --in_peaks $peaks --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ + $para_diff_arg $iso_diff_arg $perp_diff_arg + mv "${sid}__results_bzs_2/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" + """ + } else { """ scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ - --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff + $para_diff_arg $iso_diff_arg $perp_diff_arg mv "${sid}__results_bzs/commit_1/decompose_commit.h5" "./${sid}__decompose_commit.h5" """ } @@ -68,4 +79,23 @@ process COMMIT_ON_TRK { --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" """ +} + +process COMPUTE_PRIORS { + cpus 1 + + input: + tuple val(sid), path(fa), path(ad), path(md) + output: + tuple val(sid), path("${sid}__para_diff.txt"), emit: para_diff + tuple val(sid), 
path("${sid}__iso_diff.txt"), emit: iso_diff + when: + params.run_commit && params.compute_priors + + script: + """ + scil_compute_NODDI_priors.py $fa $ad $md \ + --out_txt_1fiber ${sid}__para_diff.txt --out_txt_ventricles ${sid}__iso_diff.txt \ + --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors --md_min $params.md_min_priors + """ } \ No newline at end of file diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index 0d0e9c3..ca599bf 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -5,8 +5,10 @@ nextflow.enable.dsl=2 include { TRANSFORM_LABELS; TRANSFORM_T1 } from "../processes/transform.nf" include { DECOMPOSE_CONNECTIVITY } from "../processes/decompose.nf" +include { DECOMPOSE_CONNECTIVITY as DECOMPOSE_CONNECTIVITY_2 } from "../processes/decompose.nf" include { COMMIT; - COMMIT_ON_TRK } from "../processes/commit.nf" + COMMIT_ON_TRK; + COMPUTE_PRIORS } from "../processes/commit.nf" include { COMPUTE_AFD_FIXEL; COMPUTE_CONNECTIVITY } from "../processes/compute_metrics.nf" include { VISUALIZE_CONNECTIVITY } from "../processes/viz.nf" @@ -20,8 +22,11 @@ workflow CONNECTOMICS { metrics_channel anat_channel transfos_channel + fa_ad_md_channel main: + // ** Computing priors for COMMIT ** // + COMPUTE_PRIORS(fa_ad_md_channel) // ** If -profile freesurfer, transform t1 to diff space. 
** // if ( params.run_freesurfer && !params.run_tracking ) { @@ -67,13 +72,33 @@ workflow CONNECTOMICS { DECOMPOSE_CONNECTIVITY(decompose_channel) // ** Running COMMIT1 or COMMIT2 ** // + if ( params.use_both ) { + commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose .combine(dwi_peaks_channel, by: 0) + .combine(COMPUTE_PRIORS.out.para_diff, by: 0) + .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) + COMMIT(commit_channel) + decompose_channel = COMMIT.out.trk_commit + .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + DECOMPOSE_CONNECTIVITY_2(decompose_channel) + // ** Setting output channel ** // + afd_fixel_channel = DECOMPOSE_CONNECTIVITY_2.out.decompose + .combine(fodf_channel, by: 0) + } + else { + commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine([], by: 0) + .combine([], by: 0) + + COMMIT(commit_channel) // ** Setting output channel ** // afd_fixel_channel = COMMIT.out.h5_commit .combine(fodf_channel, by: 0) + } } // ** Computing AFD fixel ** // diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index b630e75..2acb6d2 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -240,20 +240,20 @@ process CROP_ANAT { cpus 1 input: - tuple val(sid), path(t2w), path(mask) + tuple val(sid), path(anat), path(mask) output: tuple val(sid), - path("${sid}__t2w_cropped.nii.gz"), + path("${sid}__anat_cropped.nii.gz"), path("${sid}__mask_cropped.nii.gz"), emit: cropped_anat_and_mask script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - scil_crop_volume.py $t2w ${sid}__t2w_cropped.nii.gz\ - --output_bbox t2w_boundingBox.pkl -f + scil_crop_volume.py $anat ${sid}__anat_cropped.nii.gz\ + --output_bbox boundingBox.pkl -f scil_crop_volume.py $mask ${sid}__mask_cropped.nii.gz\ - --input_bbox t2w_boundingBox.pkl -f + --input_bbox boundingBox.pkl -f """ } diff --git 
a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index ad4ef84..e9006b0 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -29,6 +29,30 @@ process SEGMENT_TISSUES { """ } +process ATROPOS_SEG { + cpus 1 + + input: + tuple val(sid), path(anat), path(mask) + output: + tuple val(sid), path("${sid}__map_wm.nii.gz"), emit: wm_map + tuple val(sid), path("${sid}__map_gm.nii.gz"), emit: gm_map + tuple val(sid), path("${sid}__map_csf.nii.gz"), emit: csf_map + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + antsAtroposN4.sh -d 3 -a $anat -x $mask -c 3 \ + -o ${sid}__ -m $params.atropos_m -n $params.atropos_n \ + -b $params.atropos_formulation + mv ${sid}__SegmentationPosteriors3.nii.gz ${sid}__map_wm.nii.gz + mv ${sid}__SegmentationPosteriors2.nii.gz ${sid}__map_gm.nii.gz + mv ${sid}__SegmentationPosteriors1.nii.gz ${sid}__map_csf.nii.gz + """ +} + process GENERATE_MASKS { cpus 1 diff --git a/nextflow.config b/nextflow.config index dc1daae..c39d8be 100644 --- a/nextflow.config +++ b/nextflow.config @@ -147,9 +147,16 @@ params { loop_max_angle = 330 outlier_threshold = 0.5 + //** COMPUTE_PRIORS Options **// + compute_priors = false + fa_min_priors = 0.7 + fa_max_priors = 0.1 + md_min_priors = 0.003 + //** COMMIT Options **// run_commit = true use_commit2 = true + use_both = false commit_on_trk = false b_thr = 50 nbr_dir = 500 From 61e915f0e1494b236c531cc7a0712191641f8739 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 5 Dec 2023 22:41:02 -0500 Subject: [PATCH 40/54] add priors profile --- main.nf | 15 ++++-- modules/connectomics/processes/commit.nf | 46 +++++++++++++--- .../connectomics/workflows/connectomics.nf | 24 ++++----- modules/priors/workflows/priors.nf | 54 +++++++++++++++++++ 
modules/tracking/processes/FODF_processes.nf | 1 + nextflow.config | 5 ++ 6 files changed, 122 insertions(+), 23 deletions(-) create mode 100644 modules/priors/workflows/priors.nf diff --git a/main.nf b/main.nf index d488920..822e15d 100644 --- a/main.nf +++ b/main.nf @@ -25,6 +25,7 @@ include { TRACKING } from "./modules/tracking/workflows/tracking.nf" include { CONNECTOMICS } from "./modules/connectomics/workflows/connectomics.nf" include { POPULATION_TEMPLATE } from "./modules/template/workflows/pop_template.nf" include { FREESURFERFLOW } from "./modules/freesurfer/workflows/freesurferflow.nf" +include { PRIORS } from "./modules/priors/workflows/priors.nf" workflow { if (params.help) { display_usage() } @@ -52,6 +53,14 @@ workflow { FREESURFERFLOW(data.anat) } + if ( params.priors ) { + + data = get_data_tracking() + + PRIORS(data.dwi) + + } + if ( params.run_tracking ) { if ( params.infant_config ) { data = get_data_tracking_infant() @@ -152,9 +161,9 @@ workflow { // ** Fetching transformation files ** // transfos = REGISTRATION.out.transfos - // ** Fetching FA, MD, AD for priors computation ** // - fa_md_ad_channel = DTI.out.fa_and_md - .combine(DTI.out.ad_and_rd.map{[ it[0], it[1] ]}, by: 0) + // ** Fetching FA, MD, AD, RD for priors computation ** // + fa_md_ad_rd_channel = DTI.out.fa_and_md + .combine(DTI.out.ad_and_rd, by: 0) // ** Launching connectomics workflow ** // CONNECTOMICS(tracking, diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index e0e7233..4a7bf63 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -20,10 +20,10 @@ process COMMIT { script: def para_diff_arg = para_diff ? "--para_diff \$(cat $para_diff)" : "--para_diff $params.para_diff" def iso_diff_arg = iso_diff ? "--iso_diff \$(cat $iso_diff)" : "--iso_diff $params.iso_diff" - def perp_diff_arg = ball_stick_arg ? "" : "--perp_diff $params.perp_diff" - def ball_stick_arg = ball_stick_arg ? 
"--ball_stick" : "" + def perp_diff_arg = params.ball_stick ? "" : "--perp_diff $params.perp_diff" + def ball_stick_arg = params.ball_stick ? "--ball_stick" : "" - if ( params.use_commit2 ) { + if ( params.use_commit2 && !params.use_both ) { """ scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ @@ -85,17 +85,47 @@ process COMPUTE_PRIORS { cpus 1 input: - tuple val(sid), path(fa), path(ad), path(md) + tuple val(sid), path(fa), path(md), path(ad), path(rd) output: - tuple val(sid), path("${sid}__para_diff.txt"), emit: para_diff - tuple val(sid), path("${sid}__iso_diff.txt"), emit: iso_diff + tuple val("Priors"), path("${sid}__para_diff.txt"), emit: para_diff + tuple val("Priors"), path("${sid}__iso_diff.txt"), emit: iso_diff + tuple val("Priors"), path("${sid}__perp_diff.txt"), emit: perp_diff + tuple val(sid), path("${sid}__mask_1fiber.nii.gz"), emit: mask_1fiber + tuple val(sid), path("${sid}__mask_ventricles.nii.gz"), emit: mask_ventricles + when: params.run_commit && params.compute_priors script: """ - scil_compute_NODDI_priors.py $fa $ad $md \ - --out_txt_1fiber ${sid}__para_diff.txt --out_txt_ventricles ${sid}__iso_diff.txt \ + scil_compute_NODDI_priors.py $fa $ad $rd $md \ + --out_txt_1fiber_para ${sid}__para_diff.txt \ + --out_txt_ventricles ${sid}__iso_diff.txt \ + --out_txt_1fiber_perp ${sid}__perp_diff.txt \ + --out_mask_1fiber ${sid}__mask_1fiber.nii.gz \ + --out_mask_ventricles ${sid}__mask_ventricles.nii.gz \ --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors --md_min $params.md_min_priors """ +} + +process AVERAGE_PRIORS { + cpus 1 + + input: + tuple val(sid), path(para_diff), path(iso_diff), path(perp_diff) + + output: + path("mean_para_diff.txt"), emit: mean_para_diff + path("mean_iso_diff.txt"), emit: mean_iso_diff + path("mean_perp_diff.txt"), emit: mean_perp_diff + + script: + """ + cat $para_diff > 
all_para_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_para_diff.txt > mean_para_diff.txt + cat $iso_diff > all_iso_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_iso_diff.txt > mean_iso_diff.txt + cat $perp_diff > all_perp_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_perp_diff.txt > mean_perp_diff.txt + """ } \ No newline at end of file diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index ca599bf..b8d2a97 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -4,8 +4,8 @@ nextflow.enable.dsl=2 include { TRANSFORM_LABELS; TRANSFORM_T1 } from "../processes/transform.nf" -include { DECOMPOSE_CONNECTIVITY } from "../processes/decompose.nf" -include { DECOMPOSE_CONNECTIVITY as DECOMPOSE_CONNECTIVITY_2 } from "../processes/decompose.nf" +include { DECOMPOSE_CONNECTIVITY as INITIAL_DECOMPOSE } from "../processes/decompose.nf" +include { DECOMPOSE_CONNECTIVITY as FINAL_DECOMPOSE } from "../processes/decompose.nf" include { COMMIT; COMMIT_ON_TRK; COMPUTE_PRIORS } from "../processes/commit.nf" @@ -22,11 +22,11 @@ workflow CONNECTOMICS { metrics_channel anat_channel transfos_channel - fa_ad_md_channel + fa_md_ad_rd_channel main: // ** Computing priors for COMMIT ** // - COMPUTE_PRIORS(fa_ad_md_channel) + COMPUTE_PRIORS(fa_md_ad_rd_channel) // ** If -profile freesurfer, transform t1 to diff space. 
** // if ( params.run_freesurfer && !params.run_tracking ) { @@ -59,22 +59,22 @@ workflow CONNECTOMICS { // ** Decomposing tractogram ** // decompose_channel = COMMIT_ON_TRK.out.trk_commit .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) - DECOMPOSE_CONNECTIVITY(decompose_channel) + INITIAL_DECOMPOSE(decompose_channel) // ** Setting output channel ** // - afd_fixel_channel = DECOMPOSE_CONNECTIVITY.out.decompose + afd_fixel_channel = INITIAL_DECOMPOSE.out.decompose .combine(fodf_channel, by: 0) } else { // ** Decomposing tractogram ** // decompose_channel = tracking_channel .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) - DECOMPOSE_CONNECTIVITY(decompose_channel) + INITIAL_DECOMPOSE(decompose_channel) // ** Running COMMIT1 or COMMIT2 ** // if ( params.use_both ) { - commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose + commit_channel = INITIAL_DECOMPOSE.out.decompose .combine(dwi_peaks_channel, by: 0) .combine(COMPUTE_PRIORS.out.para_diff, by: 0) .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) @@ -82,18 +82,18 @@ workflow CONNECTOMICS { COMMIT(commit_channel) decompose_channel = COMMIT.out.trk_commit .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) - DECOMPOSE_CONNECTIVITY_2(decompose_channel) + FINAL_DECOMPOSE(decompose_channel) // ** Setting output channel ** // - afd_fixel_channel = DECOMPOSE_CONNECTIVITY_2.out.decompose + afd_fixel_channel = FINAL_DECOMPOSE.out.decompose .combine(fodf_channel, by: 0) } else { - commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose + commit_channel = INITIAL_DECOMPOSE.out.decompose .combine(dwi_peaks_channel, by: 0) .combine([], by: 0) .combine([], by: 0) - + COMMIT(commit_channel) // ** Setting output channel ** // afd_fixel_channel = COMMIT.out.h5_commit diff --git a/modules/priors/workflows/priors.nf b/modules/priors/workflows/priors.nf new file mode 100644 index 0000000..a3b0955 --- /dev/null +++ b/modules/priors/workflows/priors.nf @@ -0,0 +1,54 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +include { 
DWI_MASK } from "../../tracking/processes/preprocess.nf" +include { EXTRACT_DTI_SHELL; + DTI_METRICS } from "../../tracking/processes/DTI_processes.nf" +include { COMPUTE_PRIORS; + AVERAGE_PRIORS } from "../../connectomics/processes/commit.nf" +include { COMPUTE_FRF; + MEAN_FRF } from "../../tracking/processes/FODF_processes.nf" + +workflow PRIORS { + take: + dwi_channel + + main: + + // ** Generate the mask ** // + DWI_MASK(dwi_channel) + + // ** Extract the DTI shell ** // + EXTRACT_DTI_SHELL(dwi_channel) + + // ** Compute the DTI metrics ** // + dti_channel = EXTRACT_DTI_SHELL.out.dti_files + .combine(DWI_MASK.out.dwi_mask, by: 0) + DTI_METRICS(dti_channel) + + // ** Compute the priors ** // + priors_channel = DTI_METRICS.out.fa_and_md + .combine(DTI_METRICS.out.ad_and_rd, by: 0) + COMPUTE_PRIORS(priors_channel) + + // ** AVERAGE THE PRIORS ** // + avg_priors_channel = COMPUTE_PRIORS.out.para_diff + .join(COMPUTE_PRIORS.out.iso_diff) + .join(COMPUTE_PRIORS.out.perp_diff) + .groupTuple() + + AVERAGE_PRIORS(avg_priors_channel) + + // ** Compute the FRF ** // + frf_channel = dwi_channel + .combine(DWI_MASK.out.dwi_mask, by: 0) + COMPUTE_FRF(frf_channel) + + // ** Compute the mean FRF ** // + all_frf = COMPUTE_FRF.out.frf + .map{[it[1]]} + .collect() + MEAN_FRF(all_frf) + +} diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index 404118c..4874c7f 100644 --- a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -70,6 +70,7 @@ process COMPUTE_FRF { process MEAN_FRF { cpus 1 + publishDir = "${params.output_dir}/MEAN_FRF" input: path(all_frf) diff --git a/nextflow.config b/nextflow.config index c39d8be..16a439b 100644 --- a/nextflow.config +++ b/nextflow.config @@ -177,6 +177,7 @@ params { run_connectomics = false infant_config = false template_config = false + priors = false // Template Options // references = "./references/" @@ -251,6 +252,10 @@ profiles { } + priors 
{ + params.priors = true + } + infant { params.infant_config = true From c0ef106ca87aa9c0a53d96071641d47ce77b6ecb Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 6 Dec 2023 13:30:49 -0500 Subject: [PATCH 41/54] fix typos and minor bugs --- main.nf | 1 + modules/connectomics/processes/commit.nf | 3 ++- nextflow.config | 3 ++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/main.nf b/main.nf index 822e15d..f6c571e 100644 --- a/main.nf +++ b/main.nf @@ -337,6 +337,7 @@ def display_usage () { "fa_min_priors":"$params.fa_min_priors", "fa_max_priors":"$params.fa_max_priors", "md_min_priors":"$params.md_min_priors", + "roi_radius_priors":"$params.roi_radius", "run_commit":"$params.run_commit", "use_commit2":"$params.use_commit2", "use_both":"$params.use_both", diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 4a7bf63..46dc81c 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -104,7 +104,8 @@ process COMPUTE_PRIORS { --out_txt_1fiber_perp ${sid}__perp_diff.txt \ --out_mask_1fiber ${sid}__mask_1fiber.nii.gz \ --out_mask_ventricles ${sid}__mask_ventricles.nii.gz \ - --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors --md_min $params.md_min_priors + --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors \ + --md_min $params.md_min_priors --roi_radius $params.roi_radius_priors """ } diff --git a/nextflow.config b/nextflow.config index 16a439b..663e877 100644 --- a/nextflow.config +++ b/nextflow.config @@ -151,7 +151,8 @@ params { compute_priors = false fa_min_priors = 0.7 fa_max_priors = 0.1 - md_min_priors = 0.003 + md_min_priors = 0.003 + roi_radius_priors = 20 //** COMMIT Options **// run_commit = true From 219c53d44ed97d2ab9508e38b88f3d7aa7a5ea3b Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 12 Dec 2023 19:28:28 -0500 
Subject: [PATCH 42/54] add fastsurfer option + BN child atlas --- README.md | 30 +++-- containers/Dockerfile | 79 +++++++++---- containers/apptainer_recipe.def | 108 ++++++++++++------ main.nf | 4 + modules/connectomics/USAGE_ALL | 8 ++ modules/freesurfer/USAGE | 10 +- modules/freesurfer/USAGE_CONN | 8 ++ modules/freesurfer/processes/atlases.nf | 26 ++++- modules/freesurfer/processes/freesurfer.nf | 23 ++++ .../freesurfer/workflows/freesurferflow.nf | 30 ++++- modules/io.nf | 4 + nextflow.config | 10 +- 12 files changed, 256 insertions(+), 84 deletions(-) diff --git a/README.md b/README.md index 7e65e7b..18e910d 100644 --- a/README.md +++ b/README.md @@ -10,38 +10,46 @@ process have been taken: 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) 3. Connectoflow (https://github.com/scilus/connectoflow) -*** Please note that some steps have been removed from the original pipelines if they were not relevant for pediatric data. If you need some of these steps, please use the original pipelines. *** +> [!NOTE] +> Please note that some steps have been removed from the original pipelines if they were not relevant for pediatric data. If you need some of these steps, please use the original pipelines. -NEXTFLOW +Nextflow -------- To install nextflow, please see : https://www.nextflow.io/docs/latest/getstarted.html#requirements -The pipeline export by default a `` parameters.json `` within the output directory to provide a documentation of the parameters used during the execution. For a more detailed report (excluding execution's parameters), -the default feature of nextflow `` -with-report `` can be used to export a html report. Simply had this your command line when launching the pipeline: +The pipeline export by default a `` parameters.json `` within the output directory to provide a documentation of the parameters used during the execution. 
For a more detailed report (excluding execution's parameters), the default feature of nextflow `` -with-report `` can be used to export a html report. Simply had this your command line when launching the pipeline: -`` nextflow run main.nf --input -with-report `` +``` +nextflow run main.nf --input -with-report +``` -APPTAINER +Apptainer --------- If you are running this pipeline on Linux, it is recommended to run the pipeline using an apptainer image. The pipeline comes with a recipe file (`` /containers/apptainer_recipe.def ``) containing all the required dependencies to successfully run every profiles. To build the apptainer image, run this command: -`` sudo apptainer build . `` +``` +docker build -t . +``` +> [!WARNING] +> Due to the high number of dependencies (ANTs, FSL, MRtrix3, Scilpy, Freesurfer, FastSurfer, etc.), the resulting docker image can be pretty large (~ 40Gb). -USAGE +Usage ----- See _USAGE_ or run `` nextflow run main.nf --help `` for more details. -REFERENCES +References ---------- If you used this pipeline, please cite : diff --git a/containers/Dockerfile b/containers/Dockerfile index a870df3..f579b32 100644 --- a/containers/Dockerfile +++ b/containers/Dockerfile @@ -6,34 +6,63 @@ RUN wget -O FS_BN_GL_SF_utils.tar.gz "https://www.dropbox.com/scl/fi/6s1tc4eanf2 tar -xzvf FS_BN_GL_SF_utils.tar.gz && \ rm FS_BN_GL_SF_utils.tar.gz -# Installing freesurfer on top of scilus:1.5.0 -WORKDIR /root -RUN wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer-linux-centos7_x86_64-7.2.0.tar.gz -O fs.tar.gz && \ - tar --no-same-owner -xzvf fs.tar.gz && \ - mv freesurfer /usr/local && \ - rm fs.tar.gz -RUN wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 +# Installing dependencies. 
RUN apt-get update && \ - apt-get install csh tcsh && \ - apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev \ - echo "/usr/local/lib" >> /etc/ld.so.conf \ + apt-get install -y csh tcsh && \ + apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev && \ + echo "/usr/local/lib" >> /etc/ld.so.conf && \ ldconfig RUN wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 \ tar xjf parallel-latest.tar.bz2 \ cd parallel-* && ./configure && make && make install \ echo 'will cite' | parallel --citation 1> /dev/null 2> /dev/null & +# Setup Conda for FastSurfer +ARG CONDA_FILE=Miniconda3-py38_4.11.0-Linux-x86_64.sh +RUN wget --no-check-certificate -qO ~/miniconda.sh https://repo.continuum.io/miniconda/${CONDA_FILE} && \ + chmod +x ~/miniconda.sh && \ + ~/miniconda.sh -b -p /opt/conda && \ + rm ~/miniconda.sh +ENV PATH /opt/conda/bin:$PATH + +# Setup FastSurfer +WORKDIR / +RUN wget -O FastSurfer.tar.gz "https://github.com/Deep-MI/FastSurfer/archive/refs/tags/v2.1.2.tar.gz" && \ + tar -xzvf FastSurfer.tar.gz && \ + mv FastSurfer-2.1.2 FastSurfer && \ + rm FastSurfer.tar.gz && \ + conda env create -f FastSurfer/fastsurfer_env_cpu.yml + +# Install conda-pack: +RUN conda install -c conda-forge conda-pack + +# Use conda-pack to create a standalone env in /venv: +RUN conda-pack -n fastsurfer_cpu -o /tmp/env.tar && \ + mkdir /venv && cd /venv && tar xf /tmp/env.tar && \ + rm /tmp/env.tar + +# Fix paths. 
+RUN /venv/bin/conda-unpack +ENV PATH /venv/bin:$PATH + +# Installing freesurfer on top of scilus:1.6.0 +WORKDIR / +RUN wget "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-centos7_x86_64-7.3.2.tar.gz" -O fs.tar.gz && \ + tar --no-same-owner -xzvf fs.tar.gz && \ + rm fs.tar.gz +RUN wget -O freesurfer/license.txt "https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0" + # Setup freesurfer env ENV OS=Linux -ENV PATH=${PATH}:/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin -ENV FREESURFER_HOME=/usr/local/freesurfer -ENV FREESURFER=/usr/local/freesurfer -ENV SUBJECTS_DIR=/usr/local/freesurfer/subjects -ENV LOCAL_DIR=/usr/local/freesurfer/local -ENV FSFAST_HOME=/usr/local/freesurfer/fsfast -ENV FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast -ENV FUNCTIONALS_DIR=/usr/local/freesurfer/sessions -ENV FS_LICENSE=/usr/local/freesurfer/license.txt +ENV PATH=${PATH}:/FastSurfer:/freesurfer/bin:/freesurfer/fsfast/bin:/freesurfer/tktools:/freesurfer/mni/bin:/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +ENV FREESURFER_HOME=/freesurfer +ENV FREESURFER=/freesurfer +ENV SUBJECTS_DIR=/freesurfer/subjects +ENV LOCAL_DIR=/freesurfer/local +ENV FSFAST_HOME=/freesurfer/fsfast +ENV FMRI_ANALYSIS_DIR=/freesurfer/fsfast +ENV FUNCTIONALS_DIR=/freesurfer/sessions +ENV FS_LICENSE=/freesurfer/license.txt # set default fs options ENV FS_OVERRIDE=0 @@ -41,9 +70,9 @@ ENV FIX_VERTEX_AREA="" ENV FSF_OUTPUT_FORMAT=nii.gz # mni env requirements -ENV MINC_BIN_DIR=/usr/local/freesurfer/mni/bin -ENV MINC_LIB_DIR=/usr/local/freesurfer/mni/lib -ENV MNI_DIR=/usr/local/freesurfer/mni -ENV MNI_DATAPATH=/usr/local/freesurfer/mni/data -ENV MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 -ENV PERL5LIB=/usr/local/freesurfer/mni/share/perl5 \ No newline at end of file +ENV 
MINC_BIN_DIR=/freesurfer/mni/bin +ENV MINC_LIB_DIR=/freesurfer/mni/lib +ENV MNI_DIR=/freesurfer/mni +ENV MNI_DATAPATH=/freesurfer/mni/data +ENV MNI_PERL5LIB=/freesurfer/mni/share/perl5 +ENV PERL5LIB=/freesurfer/mni/share/perl5 \ No newline at end of file diff --git a/containers/apptainer_recipe.def b/containers/apptainer_recipe.def index 370c9ad..2dd2ab7 100644 --- a/containers/apptainer_recipe.def +++ b/containers/apptainer_recipe.def @@ -5,16 +5,47 @@ From: scilus/scilus:1.6.0 version ChildBrainFlow-1.0.0 %post - # Installing FreeSurfer on top of scilus:1.5.0 - cd /root - wget https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.2.0/freesurfer-linux-centos7_x86_64-7.2.0.tar.gz -O fs.tar.gz - tar --no-same-owner -xzvf fs.tar.gz - mv freesurfer /usr/local - rm fs.tar.gz - # Install required tcsh and csh. apt-get update apt-get install -y csh tcsh + + # Installing Conda for FastSurfer + export CONDA_FILE=Miniconda3-py38_4.11.0-Linux-x86_64.sh + wget --no-check-certificate -qO ~/miniconda.sh https://repo.continuum.io/miniconda/${CONDA_FILE} + chmod +x ~/miniconda.sh + ~/miniconda.sh -b -p /opt/conda + rm ~/miniconda.sh + export PATH=/opt/conda/bin:$PATH + + # Installing FastSurfer latest stable release. + wget -O FastSurfer.tar.gz "https://github.com/Deep-MI/FastSurfer/archive/refs/tags/v2.1.2.tar.gz" + tar -xzvf FastSurfer.tar.gz -C $APPTAINER_ROOTFS/ + mv $APPTAINER_ROOTFS/FastSurfer-2.1.2 $APPTAINER_ROOTFS/FastSurfer + rm FastSurfer.tar.gz + conda env create -f $APPTAINER_ROOTFS/FastSurfer/fastsurfer_env_cpu.yml + + # Install conda-pack + conda install -c conda-forge conda-pack + + # Use conda-pack to create a standalone env in /venv: + conda-pack -n fastsurfer_cpu -o /tmp/env.tar && \ + mkdir /venv && cd /venv && tar xf /tmp/env.tar && \ + rm /tmp/env.tar + + # Fix paths. + /venv/bin/conda-unpack + export PATH=/venv/bin:$PATH + export PYTHONPATH=/FastSurfer:$PYTHONPATH + + # Downloading checkpoints. 
+ python3.8 $APPTAINER_ROOTFS/FastSurfer/FastSurferCNN/download_checkpoints.py --all + + # Installing FreeSurfer on top of scilus:1.6.0 + cd /root + wget "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-centos7_x86_64-7.3.2.tar.gz" -O fs.tar.gz + tar --no-same-owner -xzvf fs.tar.gz + mv freesurfer $APPTAINER_ROOTFS/ + rm fs.tar.gz # Install lib* apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev @@ -22,7 +53,7 @@ From: scilus/scilus:1.6.0 ldconfig # Download additional files - wget -O /usr/local/freesurfer/license.txt https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0 + wget -O $APPTAINER_ROOTFS/freesurfer/license.txt "https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0" wget -O FS_BN_GL_SF_utils.tar.gz "https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0" && \ tar -xzvf FS_BN_GL_SF_utils.tar.gz -C $APPTAINER_ROOTFS/ && \ rm FS_BN_GL_SF_utils.tar.gz @@ -36,15 +67,15 @@ From: scilus/scilus:1.6.0 # Setup FreeSurfer environment export OS=Linux - export PATH=${PATH}:/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - export FREESURFER_HOME=/usr/local/freesurfer - export FREESURFER=/usr/local/freesurfer - export SUBJECTS_DIR=/usr/local/freesurfer/subjects - export LOCAL_DIR=/usr/local/freesurfer/local - export FSFAST_HOME=/usr/local/freesurfer/fsfast - export FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast - export FUNCTIONALS_DIR=/usr/local/freesurfer/sessions - export FS_LICENSE=/usr/local/freesurfer/license.txt + export 
PATH=${PATH}:$APPTAINER_ROOTFS/freesurfer/bin:$APPTAINER_ROOTFS/freesurfer/fsfast/bin:$APPTAINER_ROOTFS/freesurfer/tktools:$APPTAINER_ROOTFS/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export FREESURFER_HOME=$APPTAINER_ROOTFS/freesurfer + export FREESURFER=$APPTAINER_ROOTFS/freesurfer + export SUBJECTS_DIR=$APPTAINER_ROOTFS/freesurfer/subjects + export LOCAL_DIR=$APPTAINER_ROOTFS/freesurfer/local + export FSFAST_HOME=$APPTAINER_ROOTFS/freesurfer/fsfast + export FMRI_ANALYSIS_DIR=$APPTAINER_ROOTFS/freesurfer/fsfast + export FUNCTIONALS_DIR=$APPTAINER_ROOTFS/freesurfer/sessions + export FS_LICENSE=$APPTAINER_ROOTFS/freesurfer/license.txt # Set default FreeSurfer options export FS_OVERRIDE=0 @@ -52,30 +83,31 @@ From: scilus/scilus:1.6.0 export FSF_OUTPUT_FORMAT=nii.gz # Set MNI environment requirements - export MINC_BIN_DIR=/usr/local/freesurfer/mni/bin - export MINC_LIB_DIR=/usr/local/freesurfer/mni/lib - export MNI_DIR=/usr/local/freesurfer/mni - export MNI_DATAPATH=/usr/local/freesurfer/mni/data - export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 - export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 + export MINC_BIN_DIR=$APPTAINER_ROOTFS/freesurfer/mni/bin + export MINC_LIB_DIR=$APPTAINER_ROOTFS/freesurfer/mni/lib + export MNI_DIR=$APPTAINER_ROOTFS/freesurfer/mni + export MNI_DATAPATH=$APPTAINER_ROOTFS/freesurfer/mni/data + export MNI_PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + export PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 %environment export OS=Linux - export PATH=${PATH}:/usr/local/freesurfer/bin:/usr/local/freesurfer/fsfast/bin:/usr/local/freesurfer/tktools:/usr/local/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - export FREESURFER_HOME=/usr/local/freesurfer - export FREESURFER=/usr/local/freesurfer - export SUBJECTS_DIR=/usr/local/freesurfer/subjects - export LOCAL_DIR=/usr/local/freesurfer/local - export FSFAST_HOME=/usr/local/freesurfer/fsfast - 
export FMRI_ANALYSIS_DIR=/usr/local/freesurfer/fsfast - export FUNCTIONALS_DIR=/usr/local/freesurfer/sessions + export PATH=${PATH}:$APPTAINER_ROOTFS/freesurfer/bin:$APPTAINER_ROOTFS/freesurfer/fsfast/bin:$APPTAINER_ROOTFS/freesurfer/tktools:$APPTAINER_ROOTFS/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export FREESURFER_HOME=$APPTAINER_ROOTFS/freesurfer + export FREESURFER=$APPTAINER_ROOTFS/freesurfer + export SUBJECTS_DIR=$APPTAINER_ROOTFS/freesurfer/subjects + export LOCAL_DIR=$APPTAINER_ROOTFS/freesurfer/local + export FSFAST_HOME=$APPTAINER_ROOTFS/freesurfer/fsfast + export FMRI_ANALYSIS_DIR=$APPTAINER_ROOTFS/freesurfer/fsfast + export FUNCTIONALS_DIR=$APPTAINER_ROOTFS/freesurfer/sessions + export FS_LICENSE=$APPTAINER_ROOTFS/freesurfer/license.txt export FS_OVERRIDE=0 export FIX_VERTEX_AREA="" export FSF_OUTPUT_FORMAT=nii.gz - export MINC_BIN_DIR=/usr/local/freesurfer/mni/bin - export MINC_LIB_DIR=/usr/local/freesurfer/mni/lib - export MNI_DIR=/usr/local/freesurfer/mni - export MNI_DATAPATH=/usr/local/freesurfer/mni/data - export MNI_PERL5LIB=/usr/local/freesurfer/mni/share/perl5 - export PERL5LIB=/usr/local/freesurfer/mni/share/perl5 - export FS_LICENSE=/usr/local/freesurfer/license.txt + export MINC_BIN_DIR=$APPTAINER_ROOTFS/freesurfer/mni/bin + export MINC_LIB_DIR=$APPTAINER_ROOTFS/freesurfer/mni/lib + export MNI_DIR=$APPTAINER_ROOTFS/freesurfer/mni + export MNI_DATAPATH=$APPTAINER_ROOTFS/freesurfer/mni/data + export MNI_PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + export PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + export PATH=/venv/bin:$PATH diff --git a/main.nf b/main.nf index f6c571e..9a059db 100644 --- a/main.nf +++ b/main.nf @@ -352,8 +352,11 @@ def display_usage () { "processes_afd_fixel":"$params.processes_afd_fixel", "processes_connectivity":"$params.processes_connectivity", "references":"$params.references", + "recon_all":"$params.recon_all", + "recon_surf":"$params.recon_surf", 
"use_freesurfer_atlas":"$params.use_freesurfer_atlas", "use_brainnetome_atlas":"$params.use_brainnetome_atlas", + "use_brainnetome_child_atlas":"$params.use_brainnetome_child_atlas", "use_glasser_atlas":"$params.use_glasser_atlas", "use_schaefer_100_atlas":"$params.use_schaefer_100_atlas", "use_schaefer_200_atlas":"$params.use_schaefer_200_atlas", @@ -367,6 +370,7 @@ def display_usage () { "nb_threads":"$params.nb_threads", "atlas_utils_folder":"$params.atlas_utils_folder", "compute_FS_BN_GL_SF":"$params.compute_FS_BN_GL_SF", + "compute_BN_child":"$params.compute_BN_child", "compute_lausanne_multiscale":"$params.compute_lausanne_multiscale", "compute_lobes":"$params.compute_lobes", "run_freesurfer":"$params.run_freesurfer", diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL index baf0311..3111870 100644 --- a/modules/connectomics/USAGE_ALL +++ b/modules/connectomics/USAGE_ALL @@ -188,10 +188,16 @@ OPTIONAL ARGUMENTS (current value) [FREESURFERFLOW OPTIONS] + --recon_all If set, will use traditional freesurfer recon-all command to produce + anatomical surfaces. ($recon_all) + --recon_surf If set, will use CNN based FastSurfer and recon-surf to produce + anatomical surfaces (way faster!). ($recon_surf) --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. ($use_freesurfer_atlas) --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. This is the default setting. ($use_brainnetome_atlas) + --use_brainnetome_child_atlas If set, will use the brainnetome child atlas if -profile connectomcis + is used. This is the default setting. ($use_brainnetome_child_atlas) --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. ($use_glasser_atlas) --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. @@ -216,6 +222,8 @@ OPTIONAL ARGUMENTS (current value) the path of folder within the container. 
($atlas_utils_folder) --nb_threads Number of threads used by recon-all and the atlases creation ($nb_threads) + --compute_BN_child Compute the connectivity-friendly Brainnetome Child atlas. + ($compute_BN_child) --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) * FreeSurfer (adapted) * Brainnetome diff --git a/modules/freesurfer/USAGE b/modules/freesurfer/USAGE index c189400..bff00e6 100644 --- a/modules/freesurfer/USAGE +++ b/modules/freesurfer/USAGE @@ -28,10 +28,16 @@ DESCRIPTION └-- S2 └-- *t1.nii.gz + --recon_all If set, will use traditional freesurfer recon-all command to produce + anatomical surfaces. ($recon_all) + --recon_surf If set, will use CNN based FastSurfer and recon-surf to produce + anatomical surfaces (way faster!). ($recon_surf) --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. ($use_freesurfer_atlas) --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. - This is the default setting. ($use_brainnetome_atlas) + ($use_brainnetome_atlas) + --use_brainnetome_child_atlas If set, will use the brainnetome child atlas if -profile connectomcis + is used. This is the default setting. ($use_brainnetome_child_atlas) --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. ($use_glasser_atlas) --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. @@ -62,6 +68,8 @@ OPTIONAL ARGUMENTS (current value) the path of folder within the container. ($atlas_utils_folder) --nb_threads Number of threads used by recon-all and the atlases creation ($nb_threads) + --compute_BN_child Compute the connectivity-friendly Brainnetome Child atlas. 
+ ($compute_BN_child) --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) * FreeSurfer (adapted) * Brainnetome diff --git a/modules/freesurfer/USAGE_CONN b/modules/freesurfer/USAGE_CONN index 33659d2..5ab9bc8 100644 --- a/modules/freesurfer/USAGE_CONN +++ b/modules/freesurfer/USAGE_CONN @@ -49,10 +49,16 @@ DESCRIPTION └-- metrics └-- METRIC_NAME.nii.gz [Optional] + --recon_all If set, will use traditional freesurfer recon-all command to produce + anatomical surfaces. ($recon_all) + --recon_surf If set, will use CNN based FastSurfer and recon-surf to produce + anatomical surfaces (way faster!). ($recon_surf) --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. ($use_freesurfer_atlas) --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. This is the default setting. ($use_brainnetome_atlas) + --use_brainnetome_child_atlas If set, will use the brainnetome child atlas if -profile connectomcis + is used. This is the default setting. ($use_brainnetome_child_atlas) --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. ($use_glasser_atlas) --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. @@ -83,6 +89,8 @@ OPTIONAL ARGUMENTS (current value) the path of folder within the container. ($atlas_utils_folder) --nb_threads Number of threads used by recon-all and the atlases creation ($nb_threads) + --compute_BN_child Compute the connectivity-friendly Brainnetome Child atlas. 
+ ($compute_BN_child) --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) * FreeSurfer (adapted) * Brainnetome diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index c70acaf..d08a213 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -36,6 +36,30 @@ process FS_BN_GL_SF { """ } +process BN_CHILD { + cpus params.nb_threads + + input: + tuple val(sid), path(folder) + + output: + tuple val(sid), path("*brainnetome_child_v1.nii.gz"), emit: brainnetome_child + tuple val(sid), path("*brainnetome_child_v1_dilate.nii.gz"), emit: brainnetome_child_dilated + path("*[brainnetome_child]*.txt") + path("*[brainnetome_child]*.json") + + when: + params.compute_BN_child + + script: + """ + ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ + bash $params.atlas_utils_folder/freesurfer_utils/generate_atlas_BN_child.sh \$(dirname ${folder}) \ + ${sid} ${params.nb_threads} Child_Atlas/ + cp $sid/Child_Atlas/* ./ + """ +} + process LOBES { cpus params.nb_threads @@ -101,7 +125,7 @@ process LAUSANNE { """ ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ freesurfer_home=\$(dirname \$(dirname \$(which mri_label2vol))) - python $params.atlas_utils_folder/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ + /usr/bin/python $params.atlas_utils_folder/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ \$(dirname ${folder}) ${sid} \$freesurfer_home --scale ${scale} --dilation_factor 0 --log_level DEBUG mri_convert ${folder}/mri/rawavg.mgz rawavg.nii.gz diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index 5700e3f..ecc5005 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -17,4 +17,27 @@ process FREESURFER { recon-all -i $anat -s $sid -all -parallel -openmp $params.nb_threads mri_convert 
$sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz """ +} + +process RECON_SURF { + cpus params.nb_threads + + input: + tuple val(sid), path(anat) + output: + tuple val(sid), path("$sid/"), emit: folders + tuple val(sid), path("${sid}__final_t1.nii.gz"), emit: final_t1 + + script: + """ + mkdir output/ + bash /FastSurfer/run_fastsurfer.sh --sd \$(readlink -f ./) --sid $sid \\ + --t1 \$(readlink -f $anat) \ + --fs_license /freesurfer/license.txt \ + --parallel --device cpu --threads $params.nb_threads --allow_root + mri_ca_register -align-after -nobigventricles -mask $sid/mri/brainmask.mgz \ + -T $sid/mri/transforms/talairach.lta -threads $params.nb_threads $sid/mri/norm.mgz \ + \${FREESURFER_HOME}/average/talairach_mixed_with_skull.gca $sid/mri/transforms/talairach.m3z + mri_convert $sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz + """ } \ No newline at end of file diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index 8388da9..e5d1b12 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -3,10 +3,12 @@ nextflow.enable.dsl=2 include { - FREESURFER + FREESURFER; + RECON_SURF } from '../processes/freesurfer.nf' include { FS_BN_GL_SF; + BN_CHILD; LOBES; LAUSANNE } from '../processes/atlases.nf' @@ -17,18 +19,30 @@ workflow FREESURFERFLOW { main: + if ( params.recon_all ) { // ** Lauching FreeSurfer Recon-all ** // FREESURFER(anat) + folder_channel = FREESURFER.out.folders + t1 = FREESURFER.out.final_t1 + } else if ( params.recon_surf ) { + // ** Launching FastSurfer ** // + RECON_SURF(anat) + folder_channel = RECON_SURF.out.folders + t1 = RECON_SURF.out.final_t1 + } // ** Computing FS_BN_GL_SF atlases ** // - FS_BN_GL_SF(FREESURFER.out.folders) + FS_BN_GL_SF(folder_channel) + + // ** Computing BN_CHILD Atlas ** // + BN_CHILD(folder_channel) // ** Computing lobes atlases ** // - LOBES(FREESURFER.out.folders) + LOBES(folder_channel) // ** Computing 
lausanne atlas ** // scales = Channel.from(1,2,3,4,5) - LAUSANNE(FREESURFER.out.folders, + LAUSANNE(folder_channel, scales) // ** Reorganizing Lausanne multiscale atlas channel ** // @@ -57,6 +71,12 @@ workflow FREESURFERFLOW { } else { labels = FS_BN_GL_SF.out.brainnetome } + } else if ( params.use_brainnetome_child_atlas ) { + if ( params.use_dilated_labels ) { + labels = BN_CHILD.out.brainnetome_child_dilated + } else { + labels = BN_CHILD.out.brainnetome_child + } } else if ( params.use_glasser_atlas ) { if ( params.use_dilated_labels ) { labels = FS_BN_GL_SF.out.glasser_dilated @@ -115,5 +135,5 @@ workflow FREESURFERFLOW { emit: labels - t1 = FREESURFER.out.final_t1 + t1 } \ No newline at end of file diff --git a/modules/io.nf b/modules/io.nf index 3f45e2a..afad941 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -450,8 +450,11 @@ def display_run_info () { if ( params.run_freesurfer ) { log.info "[Freesurfer Options]" log.info "" + log.info "Use Recon-all: $params.recon_all" + log.info "Use FastSurfer + Recon-Surf: $params.recon_surf" log.info "Atlas utils folder: $params.atlas_utils_folder" log.info "Compute FS, BN, GL, SF: $params.compute_FS_BN_GL_SF" + log.info "Compute BN Child: $params.compute_BN_child" log.info "Compute lobes: $params.compute_lobes" log.info "Compute lausanne multiscale: $params.compute_lausanne_multiscale" log.info "Number of threads: $params.nb_threads" @@ -459,6 +462,7 @@ def display_run_info () { log.info "ATLAS SELECTION" log.info "Use Freesurfer atlas: $params.use_freesurfer_atlas" log.info "Use Brainnetome atlas: $params.use_brainnetome_atlas" + log.info "Use Brainnetome Child atlas: $params.use_brainnetome_child_atlas" log.info "Use Glasser atlas: $params.use_glasser_atlas" log.info "Use Schaefer 100 atlas: $params.use_schaefer_100_atlas" log.info "Use Schaefer 200 atlas: $params.use_schaefer_200_atlas" diff --git a/nextflow.config b/nextflow.config index 663e877..39a6045 100644 --- a/nextflow.config +++ b/nextflow.config @@ 
-187,8 +187,11 @@ params { Pop_Avg_Publish_Dir = "./Results_ChildBrainFlow/Pop_Avg" // ** FreeSurfer Options ** // + recon_all = false + recon_surf = true use_freesurfer_atlas = false - use_brainnetome_atlas = true + use_brainnetome_atlas = false + use_brainnetome_child_atlas = true use_glasser_atlas = false use_schaefer_100_atlas = false use_schaefer_200_atlas = false @@ -201,8 +204,9 @@ params { use_dilated_labels = false nb_threads = 4 atlas_utils_folder = "/FS_BN_GL_SF_utils/" - compute_FS_BN_GL_SF = true - compute_lausanne_multiscale = true + compute_FS_BN_GL_SF = false + compute_BN_child = true + compute_lausanne_multiscale = false compute_lobes = false // ** Output Options ** // From 544cc6b85f32db0e1c2a4e1d94cdbcd2c94ee374 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Tue, 12 Dec 2023 20:13:06 -0500 Subject: [PATCH 43/54] fix typo --- main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.nf b/main.nf index 9a059db..f7c1b13 100644 --- a/main.nf +++ b/main.nf @@ -173,7 +173,7 @@ workflow { metrics_flat, t2w, transfos, - fa_md_ad_channel) + fa_md_ad_rd_channel) } if ( params.run_connectomics && !params.run_tracking ) { From d490750c0aa36d135d4ff72fe92fffb5203d0f03 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 13 Dec 2023 13:43:26 -0500 Subject: [PATCH 44/54] fix commit with priors --- main.nf | 2 +- modules/connectomics/USAGE | 2 +- modules/connectomics/USAGE_ALL | 2 +- modules/connectomics/USAGE_TRACKING | 2 +- modules/connectomics/processes/commit.nf | 6 +-- .../connectomics/workflows/connectomics.nf | 42 ++++++++++++++----- modules/io.nf | 1 + nextflow.config | 2 +- 8 files changed, 40 insertions(+), 19 deletions(-) diff --git a/main.nf b/main.nf index f7c1b13..20cebc0 100644 --- a/main.nf +++ b/main.nf @@ -340,7 +340,7 @@ def display_usage () { "roi_radius_priors":"$params.roi_radius", 
"run_commit":"$params.run_commit", "use_commit2":"$params.use_commit2", - "use_both":"$params.use_both", + "use_both_commit":"$params.use_both_commit", "commit_on_trk":"$params.commit_on_trk", "b_thr":"$params.b_thr", "ball_stick":"$params.ball_stick", diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE index a95ce6a..4141338 100644 --- a/modules/connectomics/USAGE +++ b/modules/connectomics/USAGE @@ -67,7 +67,7 @@ DESCRIPTION --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. - --use_both If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell data. ($use_both) --b_thr Tolerance value to considier bvalues to be the same shell. --nbr_dir Number of directions, (half sphere), representing the possible diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL index 3111870..e039e87 100644 --- a/modules/connectomics/USAGE_ALL +++ b/modules/connectomics/USAGE_ALL @@ -262,7 +262,7 @@ OPTIONAL ARGUMENTS (current value) --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. - --use_both If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell data. ($use_both) --b_thr Tolerance value to considier bvalues to be the same shell. 
--nbr_dir Number of directions, (half sphere), representing the possible diff --git a/modules/connectomics/USAGE_TRACKING b/modules/connectomics/USAGE_TRACKING index f7d1fad..cf82391 100644 --- a/modules/connectomics/USAGE_TRACKING +++ b/modules/connectomics/USAGE_TRACKING @@ -219,7 +219,7 @@ OPTIONAL ARGUMENTS (current value) --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. - --use_both If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell data. ($use_both) --b_thr Tolerance value to considier bvalues to be the same shell. --nbr_dir Number of directions, (half sphere), representing the possible diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 46dc81c..f6d52e1 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -7,7 +7,7 @@ process COMMIT { memory params.commit_memory_limit input: - tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks), path(para_diff), path(iso_diff) + tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks), path(para_diff), path(iso_diff), path(perp_diff) output: tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit, optional: true tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit, optional: true @@ -19,8 +19,8 @@ process COMMIT { script: def para_diff_arg = para_diff ? "--para_diff \$(cat $para_diff)" : "--para_diff $params.para_diff" - def iso_diff_arg = iso_diff ? "--iso_diff \$(cat $iso_diff)" : "--iso_diff $params.iso_diff" - def perp_diff_arg = params.ball_stick ? "" : "--perp_diff $params.perp_diff" + def iso_diff_arg = iso_diff ? 
"--iso_diff \$(cat $iso_diff)" : "--iso_diff $params.iso_diff" + def perp_diff_arg = perp_diff ? "--perp_diff \$(cat $perp_diff)" : "--perp_diff $params.perp_diff" def ball_stick_arg = params.ball_stick ? "--ball_stick" : "" if ( params.use_commit2 && !params.use_both ) { diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index b8d2a97..aac0173 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -74,12 +74,22 @@ workflow CONNECTOMICS { // ** Running COMMIT1 or COMMIT2 ** // if ( params.use_both ) { - commit_channel = INITIAL_DECOMPOSE.out.decompose - .combine(dwi_peaks_channel, by: 0) - .combine(COMPUTE_PRIORS.out.para_diff, by: 0) - .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) + if ( params.compute_priors ) { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(COMPUTE_PRIORS.out.para_diff, by: 0) + .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) + .combine(COMPUTE_PRIORS.out.perp_diff, by: 0) + COMMIT(commit_channel) + } else { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + COMMIT(commit_channel) + } - COMMIT(commit_channel) decompose_channel = COMMIT.out.trk_commit .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) FINAL_DECOMPOSE(decompose_channel) @@ -89,12 +99,22 @@ workflow CONNECTOMICS { .combine(fodf_channel, by: 0) } else { - commit_channel = INITIAL_DECOMPOSE.out.decompose - .combine(dwi_peaks_channel, by: 0) - .combine([], by: 0) - .combine([], by: 0) - - COMMIT(commit_channel) + + if ( params.compute_priors ) { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(COMPUTE_PRIORS.out.para_diff, by: 0) + .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) + .combine(COMPUTE_PRIORS.out.perp_diff, by: 0) + COMMIT(commit_channel) 
+ } else { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + COMMIT(commit_channel) + } // ** Setting output channel ** // afd_fixel_channel = COMMIT.out.h5_commit .combine(fodf_channel, by: 0) diff --git a/modules/io.nf b/modules/io.nf index afad941..f560eec 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -491,6 +491,7 @@ def display_run_info () { log.info "COMMIT OPTIONS" log.info "Run COMMIT: $params.run_commit" log.info "Use COMMIT2: $params.use_commit2" + log.info "Use both COMMIT: $params.use_both_commit" log.info "COMMIT on trk: $params.commit_on_trk" log.info "B-value threshold: $params.b_thr" log.info "Number of directions: $params.nbr_dir" diff --git a/nextflow.config b/nextflow.config index 39a6045..9449b88 100644 --- a/nextflow.config +++ b/nextflow.config @@ -157,7 +157,7 @@ params { //** COMMIT Options **// run_commit = true use_commit2 = true - use_both = false + use_both_commit = false commit_on_trk = false b_thr = 50 nbr_dir = 500 From da0cb05f5c538994ac96cb3bf73deea872269df7 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 13 Dec 2023 13:45:36 -0500 Subject: [PATCH 45/54] fix typo in USAGE --- modules/connectomics/USAGE | 2 +- modules/connectomics/USAGE_ALL | 2 +- modules/connectomics/USAGE_TRACKING | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE index 4141338..1c91f4e 100644 --- a/modules/connectomics/USAGE +++ b/modules/connectomics/USAGE @@ -68,7 +68,7 @@ DESCRIPTION --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell - data. ($use_both) + data. 
($use_both_commit) --b_thr Tolerance value to considier bvalues to be the same shell. --nbr_dir Number of directions, (half sphere), representing the possible orientations of the response functions ($nbr_dir) diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL index e039e87..b774ab5 100644 --- a/modules/connectomics/USAGE_ALL +++ b/modules/connectomics/USAGE_ALL @@ -263,7 +263,7 @@ OPTIONAL ARGUMENTS (current value) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell - data. ($use_both) + data. ($use_both_commit) --b_thr Tolerance value to considier bvalues to be the same shell. --nbr_dir Number of directions, (half sphere), representing the possible orientations of the response functions ($nbr_dir) diff --git a/modules/connectomics/USAGE_TRACKING b/modules/connectomics/USAGE_TRACKING index cf82391..14f8253 100644 --- a/modules/connectomics/USAGE_TRACKING +++ b/modules/connectomics/USAGE_TRACKING @@ -220,7 +220,7 @@ OPTIONAL ARGUMENTS (current value) --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) COMMIT2 output will replaced the COMMIT1 output. --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell - data. ($use_both) + data. ($use_both_commit) --b_thr Tolerance value to considier bvalues to be the same shell. 
--nbr_dir Number of directions, (half sphere), representing the possible orientations of the response functions ($nbr_dir) From a9660374ca18140fd00658729340a9cf7b1a2fda Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 14 Dec 2023 00:11:40 -0500 Subject: [PATCH 46/54] change .gca atlas to match 7.3.2 version --- modules/freesurfer/processes/freesurfer.nf | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index ecc5005..6e93f6d 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -29,6 +29,9 @@ process RECON_SURF { tuple val(sid), path("${sid}__final_t1.nii.gz"), emit: final_t1 script: + // ** Adding a registration to .gca atlas to generate the talairach.m3z file (subcortical atlas segmentation ** // + // ** wont work without it). A little time consuming but necessary. For FreeSurfer 7.3.2, RB_all_2020-01-02.gca ** // + // ** is the default atlas. Update when bumping FreeSurfer version. 
** // """ mkdir output/ bash /FastSurfer/run_fastsurfer.sh --sd \$(readlink -f ./) --sid $sid \\ @@ -37,7 +40,7 @@ process RECON_SURF { --parallel --device cpu --threads $params.nb_threads --allow_root mri_ca_register -align-after -nobigventricles -mask $sid/mri/brainmask.mgz \ -T $sid/mri/transforms/talairach.lta -threads $params.nb_threads $sid/mri/norm.mgz \ - \${FREESURFER_HOME}/average/talairach_mixed_with_skull.gca $sid/mri/transforms/talairach.m3z + \${FREESURFER_HOME}/average/RB_all_2020-01-02.gca $sid/mri/transforms/talairach.m3z mri_convert $sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz """ } \ No newline at end of file From b0cdd8d2262c50168c8842575ec8fa4098f4db12 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Fri, 22 Dec 2023 15:21:00 -0500 Subject: [PATCH 47/54] updated slurm time and memory constraints --- modules/connectomics/processes/commit.nf | 14 ++++-- .../connectomics/processes/compute_metrics.nf | 6 ++- modules/connectomics/processes/decompose.nf | 3 +- modules/connectomics/processes/transform.nf | 6 ++- modules/connectomics/processes/viz.nf | 3 +- .../connectomics/workflows/connectomics.nf | 2 +- modules/freesurfer/processes/atlases.nf | 8 ++++ modules/freesurfer/processes/freesurfer.nf | 5 +++ modules/tracking/processes/DTI_processes.nf | 8 +++- modules/tracking/processes/FODF_processes.nf | 12 +++++- modules/tracking/processes/SH_processes.nf | 6 ++- modules/tracking/processes/preprocess.nf | 43 ++++++++++++++++--- .../processes/registration_processes.nf | 4 ++ .../tracking/processes/tracking_processes.nf | 18 ++++++++ nextflow.config | 18 ++++++-- 15 files changed, 132 insertions(+), 24 deletions(-) diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index f6d52e1..940ace1 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -4,7 +4,8 @@ nextflow.enable.dsl=2 process COMMIT { cpus 
params.processes_commit - memory params.commit_memory_limit + memory { params.commit_memory_limit * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks), path(para_diff), path(iso_diff), path(perp_diff) @@ -23,7 +24,7 @@ process COMMIT { def perp_diff_arg = perp_diff ? "--perp_diff \$(cat $perp_diff)" : "--perp_diff $params.perp_diff" def ball_stick_arg = params.ball_stick ? "--ball_stick" : "" - if ( params.use_commit2 && !params.use_both ) { + if ( params.use_commit2 && !params.use_both_commit ) { """ scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ @@ -31,7 +32,7 @@ process COMMIT { mv "${sid}__results_bzs/commit_2/decompose_commit.h5" "./${sid}__decompose_commit.h5" """ } - else if ( params.use_both ) { + else if ( params.use_both_commit ) { """ scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs_1/" --ball_stick --commit2 \ --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ @@ -54,7 +55,8 @@ process COMMIT { process COMMIT_ON_TRK { cpus params.processes_commit - memory params.commit_memory_limit + memory { params.commit_memory_limit * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) @@ -83,6 +85,8 @@ process COMMIT_ON_TRK { process COMPUTE_PRIORS { cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(fa), path(md), path(ad), path(rd) @@ -111,6 +115,8 @@ process COMPUTE_PRIORS { process AVERAGE_PRIORS { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(para_diff), path(iso_diff), path(perp_diff) diff --git a/modules/connectomics/processes/compute_metrics.nf b/modules/connectomics/processes/compute_metrics.nf index a9b72b5..c314343 100644 --- 
a/modules/connectomics/processes/compute_metrics.nf +++ b/modules/connectomics/processes/compute_metrics.nf @@ -4,7 +4,8 @@ nextflow.enable.dsl=2 process COMPUTE_AFD_FIXEL { cpus params.processes_afd_fixel - memory '2 GB' + memory { 8.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(h5), path(fodf) @@ -19,7 +20,8 @@ process COMPUTE_AFD_FIXEL { process COMPUTE_CONNECTIVITY { cpus params.processes_connectivity - memory '2 GB' + memory { 8.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(h5), path(labels), path(metrics) diff --git a/modules/connectomics/processes/decompose.nf b/modules/connectomics/processes/decompose.nf index 1a20dc2..3f72780 100644 --- a/modules/connectomics/processes/decompose.nf +++ b/modules/connectomics/processes/decompose.nf @@ -4,7 +4,8 @@ nextflow.enable.dsl=2 process DECOMPOSE_CONNECTIVITY { cpus 1 - memory { 7.B * trk.size() } + memory { 16.GB * task.attempt } + time { 6.hour * task.attempt } input: tuple val(sid), path(trk), path(labels) diff --git a/modules/connectomics/processes/transform.nf b/modules/connectomics/processes/transform.nf index d4d086c..ac61d16 100644 --- a/modules/connectomics/processes/transform.nf +++ b/modules/connectomics/processes/transform.nf @@ -4,7 +4,8 @@ nextflow.enable.dsl=2 process TRANSFORM_LABELS { cpus 1 - memory '2 GB' + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(labels), path(t2), path(mat), path(syn) @@ -21,7 +22,8 @@ process TRANSFORM_LABELS { process TRANSFORM_T1 { cpus 1 - memory '2 GB' + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(t1), path(dwi), path(bval), path(bvec), path(mat), path(syn) diff --git a/modules/connectomics/processes/viz.nf b/modules/connectomics/processes/viz.nf index 3fd6a92..d0f44cb 100644 --- a/modules/connectomics/processes/viz.nf +++ b/modules/connectomics/processes/viz.nf @@ -4,7 +4,8 @@ nextflow.enable.dsl=2 process 
VISUALIZE_CONNECTIVITY { cpus 1 - memory "2 GB" + memory { 2.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(npy) diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index aac0173..75cabe3 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -72,7 +72,7 @@ workflow CONNECTOMICS { INITIAL_DECOMPOSE(decompose_channel) // ** Running COMMIT1 or COMMIT2 ** // - if ( params.use_both ) { + if ( params.use_both_commit ) { if ( params.compute_priors ) { commit_channel = INITIAL_DECOMPOSE.out.decompose diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index d08a213..fdafcb4 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -4,6 +4,8 @@ nextflow.enable.dsl=2 process FS_BN_GL_SF { cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(folder) @@ -38,6 +40,8 @@ process FS_BN_GL_SF { process BN_CHILD { cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(folder) @@ -62,6 +66,8 @@ process BN_CHILD { process LOBES { cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(folder) @@ -104,6 +110,8 @@ process LOBES { process LAUSANNE { cpus 1 + memory { 24.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(folder) diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index 6e93f6d..b5b3d25 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -4,6 +4,8 @@ nextflow.enable.dsl=2 process FREESURFER { cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 6.hour * task.attempt } input: tuple val(sid), path(anat) @@ 
-21,6 +23,9 @@ process FREESURFER { process RECON_SURF { cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 6.hour * task.attempt } + input: tuple val(sid), path(anat) diff --git a/modules/tracking/processes/DTI_processes.nf b/modules/tracking/processes/DTI_processes.nf index ffb9e43..7987c30 100644 --- a/modules/tracking/processes/DTI_processes.nf +++ b/modules/tracking/processes/DTI_processes.nf @@ -3,7 +3,9 @@ nextflow.enable.dsl=2 process EXTRACT_DTI_SHELL { - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -36,7 +38,9 @@ process EXTRACT_DTI_SHELL { } process DTI_METRICS { - cpus 3 + cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index 4874c7f..5cfa147 100644 --- a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -3,7 +3,9 @@ nextflow.enable.dsl=2 process FODF_SHELL { - cpus 3 + cpus 1 + memory { 8.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -40,7 +42,9 @@ process FODF_SHELL { } process COMPUTE_FRF { - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -70,6 +74,8 @@ process COMPUTE_FRF { process MEAN_FRF { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } publishDir = "${params.output_dir}/MEAN_FRF" input: @@ -87,6 +93,8 @@ process MEAN_FRF { process FODF_METRICS { cpus params.processes_fodf + memory { 8.GB * task.attempt } + time { 3.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask), path(fa), path(md), path(frf) diff --git 
a/modules/tracking/processes/SH_processes.nf b/modules/tracking/processes/SH_processes.nf index 9c7d5aa..5344ff3 100644 --- a/modules/tracking/processes/SH_processes.nf +++ b/modules/tracking/processes/SH_processes.nf @@ -3,7 +3,9 @@ nextflow.enable.dsl=2 process SH_FITTING_SHELL { - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.h * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -23,6 +25,8 @@ process SH_FITTING_SHELL { process SH_FITTING { cpus 1 + memory { 8.GB * task.attempt } + time { 1.h * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 2acb6d2..f4d56ad 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -3,7 +3,9 @@ nextflow.enable.dsl=2 process BET_DWI { - cpus 2 + cpus 1 + memory { 4.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -38,7 +40,9 @@ process BET_DWI { } process BET_T2 { - cpus 2 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(anat) @@ -57,6 +61,8 @@ process BET_T2 { process DENOISING { cpus params.processes_denoise_dwi + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi) @@ -70,13 +76,15 @@ process DENOISING { export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - dwidenoise $dwi ${sid}__dwi_denoised.nii.gz -extent 7 -nthreads 6 + dwidenoise $dwi ${sid}__dwi_denoised.nii.gz -extent 7 -nthreads $task.cpus fslmaths ${sid}__dwi_denoised.nii.gz -thr 0 ${sid}__dwi_denoised.nii.gz """ } process TOPUP { cpus 4 + memory { 8.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(revb0) @@ -111,6 +119,7 @@ process TOPUP { process EDDY_TOPUP { cpus params.processes_eddy 
memory { 5.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0s_corrected), path(field), path(movpar) @@ -148,6 +157,8 @@ process EDDY_TOPUP { process N4 { cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -174,6 +185,8 @@ process N4 { process CROP_DWI { cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(b0_mask) @@ -197,6 +210,8 @@ process CROP_DWI { process DENOISE_T1 { cpus params.processes_denoise_t1 + memory { 2.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(t1) @@ -217,6 +232,8 @@ process DENOISE_T1 { process N4_T1 { cpus 1 + memory { 2.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(t1) @@ -238,6 +255,8 @@ process N4_T1 { process CROP_ANAT { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(anat), path(mask) @@ -259,6 +278,8 @@ process CROP_ANAT { process RESAMPLE_T1 { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(t1) @@ -280,6 +301,8 @@ process RESAMPLE_T1 { process BET_T1 { cpus params.processes_bet_t1 + memory { 16.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(t1) @@ -306,6 +329,8 @@ process BET_T1 { process RESAMPLE_ANAT { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(t2w), path(mask) @@ -331,6 +356,8 @@ process RESAMPLE_ANAT { process NORMALIZE { cpus 3 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ -373,7 +400,9 @@ process NORMALIZE { } process RESAMPLE_DWI { - cpus 3 + cpus 1 + memory { 6.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), 
path(mask) @@ -397,7 +426,9 @@ process RESAMPLE_DWI { } process EXTRACT_B0 { - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -417,6 +448,8 @@ process EXTRACT_B0 { process DWI_MASK { cpus 1 + memory { 8.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index c445dec..11c2a50 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -4,6 +4,8 @@ nextflow.enable.dsl=2 process REGISTER_T2 { cpus params.processes_registration + memory { 16.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(md), path(t2w), path(wm_mask) @@ -56,6 +58,8 @@ process REGISTER_T2 { process REGISTER_T1 { cpus params.processes_registration + memory { 16.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(fa), path(t1), path(t1_mask), path(b0) diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index e9006b0..b8cdceb 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -4,6 +4,8 @@ nextflow.enable.dsl=2 process SEGMENT_TISSUES { cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(anat) @@ -31,6 +33,8 @@ process SEGMENT_TISSUES { process ATROPOS_SEG { cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(anat), path(mask) @@ -55,6 +59,8 @@ process ATROPOS_SEG { process GENERATE_MASKS { cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(wm_mask), path(fa) @@ -79,6 +85,8 @@ process GENERATE_MASKS { process LOCAL_TRACKING_MASK { cpus 
1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(wm), path(fa) @@ -104,6 +112,8 @@ process LOCAL_TRACKING_MASK { process LOCAL_SEEDING_MASK { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(wm), path(fa) @@ -129,6 +139,8 @@ process LOCAL_SEEDING_MASK { process LOCAL_TRACKING { cpus 2 + memory { 16.GB * task.attempt } + time { 8.hour * task.attempt } input: tuple val(sid), path(fodf), path(seeding_mask), path(tracking_mask) @@ -155,6 +167,8 @@ process LOCAL_TRACKING { process PFT_SEEDING_MASK { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(wm), path(fa), path(interface_mask) @@ -188,6 +202,8 @@ process PFT_SEEDING_MASK { process PFT_TRACKING_MASK { cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(wm), path(gm), path(csf) @@ -211,6 +227,8 @@ process PFT_TRACKING_MASK { process PFT_TRACKING { cpus 2 + memory { 16.GB * task.attempt } + time { 15.hour * task.attempt } input: tuple val(sid), path(fodf), path(include), path(exclude), path(seed) diff --git a/nextflow.config b/nextflow.config index 9449b88..31c6273 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,5 +1,5 @@ process { - publishDir = {"./Results_ChildBrainFlow/$sid/${task.process.replaceAll(':', '-')}"} + publishDir = {"${params.output_dir}/$sid/${task.process.replaceAll(':', '-')}"} scratch = true errorStrategy = { task.attempt <= 3 ? 
'retry' : 'ignore' } maxRetries = 3 @@ -170,7 +170,7 @@ params { processes_commit = 8 processes_afd_fixel = 4 processes_connectivity = 4 - params.commit_memory_limit = '6.GB' + params.commit_memory_limit = '16.GB' // Profiles Options run_freesurfer = false @@ -236,7 +236,7 @@ singularity.autoMounts = true profiles { no_symlink { process{ - publishDir = [path: {"./Results_Infant_Tracking/$sid/${task.process.replaceAll(':', '-')}"}, mode: 'copy'] + publishDir = [path: {"${params.output_dir}/$sid/${task.process.replaceAll(':', '-')}"}, mode: 'copy'] } } @@ -244,6 +244,18 @@ profiles { process.scratch="/tmp" } + hcp { + process { + executor = 'slurm' + pollInterval = '60 sec' + submitRateLimit = '60/1min' + queueSize = 1000 + errorStrategy = 'retry' + maxRetries = 1 + errorStrategy = { task.exitStatus in [137,139,140] ? 'retry' : 'finish' } + } + } + freesurfer { params.run_freesurfer = true } From 1f424f4af1518e9a4f1733e0a1722a645fd3336f Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 4 Jan 2024 12:41:39 -0500 Subject: [PATCH 48/54] fix mem in freesurfer + queuesize and maxforks --- modules/freesurfer/processes/freesurfer.nf | 4 ++-- nextflow.config | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index b5b3d25..37746fd 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl=2 process FREESURFER { cpus params.nb_threads - memory { 24.GB * task.attempt } + memory { 31.GB * task.attempt } time { 6.hour * task.attempt } input: @@ -23,7 +23,7 @@ process FREESURFER { process RECON_SURF { cpus params.nb_threads - memory { 24.GB * task.attempt } + memory { 31.GB * task.attempt } time { 6.hour * task.attempt } diff --git a/nextflow.config b/nextflow.config index 31c6273..1a7aa81 100644 --- a/nextflow.config +++ 
b/nextflow.config @@ -248,12 +248,15 @@ profiles { process { executor = 'slurm' pollInterval = '60 sec' - submitRateLimit = '60/1min' - queueSize = 1000 + submitRateLimit = '1000/1min' errorStrategy = 'retry' maxRetries = 1 errorStrategy = { task.exitStatus in [137,139,140] ? 'retry' : 'finish' } } + executor { + maxForks = 1000 + queueSize = 1000 + } } freesurfer { From de39fe675ddcc7fcd4368d06cfe1597c0419bbb7 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Fri, 22 Mar 2024 17:52:59 -0400 Subject: [PATCH 49/54] Fix small typos --- README.md | 8 ++++---- modules/tracking/processes/tracking_processes.nf | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 18e910d..2c6fc29 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,9 @@ ChildBrainFlow Pipeline ======================= ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. -It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +It is essentially a merged version of multiple individual pipelines to avoid the handling of inputs/outputs between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which -process have been taken: +processes have been taken: 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) @@ -17,7 +17,7 @@ Nextflow -------- To install nextflow, please see : https://www.nextflow.io/docs/latest/getstarted.html#requirements -The pipeline export by default a `` parameters.json `` within the output directory to provide a documentation of the parameters used during the execution. For a more detailed report (excluding execution's parameters), the default feature of nextflow `` -with-report `` can be used to export a html report. 
Simply had this your command line when launching the pipeline: +The pipeline export by default a `` parameters.json `` within the output directory to provide a documentation of the parameters used during the execution. For a more detailed report (excluding execution's parameters), the default feature of nextflow `` -with-report `` can be used to export an html report. Simply add this to your command line when launching the pipeline: ``` nextflow run main.nf --input -with-report @@ -30,7 +30,7 @@ The pipeline comes with a recipe file (`` /containers/apptainer_recipe.def ``) c dependencies to successfully run every profiles. To build the apptainer image, run this command: ``` -sudo apptainer build ``` Docker diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index b8cdceb..912545d 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -140,7 +140,7 @@ process LOCAL_SEEDING_MASK { process LOCAL_TRACKING { cpus 2 memory { 16.GB * task.attempt } - time { 8.hour * task.attempt } + time { 12.hour * task.attempt } input: tuple val(sid), path(fodf), path(seeding_mask), path(tracking_mask) @@ -228,7 +228,7 @@ process PFT_TRACKING_MASK { process PFT_TRACKING { cpus 2 memory { 16.GB * task.attempt } - time { 15.hour * task.attempt } + time { 20.hour * task.attempt } input: tuple val(sid), path(fodf), path(include), path(exclude), path(seed) From 908ba7f69124778b7390c469227cc331135dba67 Mon Sep 17 00:00:00 2001 From: Anthony Gagnon <anthony.gagnon7@usherbrooke.ca> Date: Thu, 5 Sep 2024 09:37:35 -0400 Subject: [PATCH 50/54] fix process resources for mp2 --- modules/connectomics/processes/commit.nf | 6 +++--- modules/connectomics/processes/decompose.nf | 6 +++--- modules/freesurfer/processes/atlases.nf | 3 ++- modules/freesurfer/processes/freesurfer.nf | 8 ++++---- .../processes/registration_processes.nf | 6 +++--- .../tracking/processes/tracking_processes.nf | 6 +++--- nextflow.config | 17
+++++++++++------ 7 files changed, 29 insertions(+), 23 deletions(-) diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 940ace1..de61c31 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -4,8 +4,8 @@ nextflow.enable.dsl=2 process COMMIT { cpus params.processes_commit - memory { params.commit_memory_limit * task.attempt } - time { 4.hour * task.attempt } + memory { 31.GB * task.attempt } + time { 8.hour * task.attempt } input: tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks), path(para_diff), path(iso_diff), path(perp_diff) @@ -135,4 +135,4 @@ process AVERAGE_PRIORS { cat $perp_diff > all_perp_diff.txt awk '{ total += \$1; count++ } END { print total/count }' all_perp_diff.txt > mean_perp_diff.txt """ -} \ No newline at end of file +} diff --git a/modules/connectomics/processes/decompose.nf b/modules/connectomics/processes/decompose.nf index 3f72780..9293ad4 100644 --- a/modules/connectomics/processes/decompose.nf +++ b/modules/connectomics/processes/decompose.nf @@ -4,8 +4,8 @@ nextflow.enable.dsl=2 process DECOMPOSE_CONNECTIVITY { cpus 1 - memory { 16.GB * task.attempt } - time { 6.hour * task.attempt } + memory { 31.GB * task.attempt } + time { 12.hour * task.attempt } input: tuple val(sid), path(trk), path(labels) @@ -36,4 +36,4 @@ process DECOMPOSE_CONNECTIVITY { $no_pruning_arg $no_remove_loops_arg $no_remove_outliers_arg --min_length $params.min_length --max_length $params.max_length \ --loop_max_angle $params.loop_max_angle --outlier_threshold $params.outlier_threshold -v """ -} \ No newline at end of file +} diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index fdafcb4..fd85a14 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -40,7 +40,7 @@ process FS_BN_GL_SF { process BN_CHILD { cpus params.nb_threads - memory { 24.GB * 
task.attempt } + memory { 31.GB * task.attempt } time { 2.hour * task.attempt } input: @@ -51,6 +51,7 @@ process BN_CHILD { tuple val(sid), path("*brainnetome_child_v1_dilate.nii.gz"), emit: brainnetome_child_dilated path("*[brainnetome_child]*.txt") path("*[brainnetome_child]*.json") + path("*.stats") when: params.compute_BN_child diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf index b5b3d25..a37651a 100644 --- a/modules/freesurfer/processes/freesurfer.nf +++ b/modules/freesurfer/processes/freesurfer.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl=2 process FREESURFER { cpus params.nb_threads - memory { 24.GB * task.attempt } + memory { 31.GB * task.attempt } time { 6.hour * task.attempt } input: @@ -23,8 +23,8 @@ process FREESURFER { process RECON_SURF { cpus params.nb_threads - memory { 24.GB * task.attempt } - time { 6.hour * task.attempt } + memory { 31.GB * task.attempt } + time { 10.hour * task.attempt } input: @@ -48,4 +48,4 @@ process RECON_SURF { \${FREESURFER_HOME}/average/RB_all_2020-01-02.gca $sid/mri/transforms/talairach.m3z mri_convert $sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz """ -} \ No newline at end of file +} diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index 11c2a50..b68bf10 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -58,8 +58,8 @@ process REGISTER_T2 { process REGISTER_T1 { cpus params.processes_registration - memory { 16.GB * task.attempt } - time { 4.hour * task.attempt } + memory { 24.GB * task.attempt } + time { 6.hour * task.attempt } input: tuple val(sid), path(fa), path(t1), path(t1_mask), path(b0) @@ -106,4 +106,4 @@ process REGISTER_T1 { scil_image_math.py convert ${sid}__t1_mask_warped.nii.gz ${sid}__t1_mask_warped.nii.gz\ --data_type uint8 -f """ -} \ No newline at end of file +} diff --git 
a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index b8cdceb..bee7f3d 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -227,8 +227,8 @@ process PFT_TRACKING_MASK { process PFT_TRACKING { cpus 2 - memory { 16.GB * task.attempt } - time { 15.hour * task.attempt } + memory { 31.GB * task.attempt } + time { 24.hour * task.attempt } input: tuple val(sid), path(fodf), path(include), path(exclude), path(seed) @@ -255,4 +255,4 @@ process PFT_TRACKING { scil_remove_invalid_streamlines.py tmp.trk\ ${sid}__pft_tracking.trk --remove_single_point """ -} \ No newline at end of file +} diff --git a/nextflow.config b/nextflow.config index 31c6273..96635d9 100644 --- a/nextflow.config +++ b/nextflow.config @@ -170,7 +170,7 @@ params { processes_commit = 8 processes_afd_fixel = 4 processes_connectivity = 4 - params.commit_memory_limit = '16.GB' + params.commit_memory_limit = '24.GB' // Profiles Options run_freesurfer = false @@ -247,13 +247,18 @@ profiles { hcp { process { executor = 'slurm' - pollInterval = '60 sec' - submitRateLimit = '60/1min' - queueSize = 1000 errorStrategy = 'retry' maxRetries = 1 - errorStrategy = { task.exitStatus in [137,139,140] ? 
'retry' : 'finish' } } + executor { + pollInterval = '180 sec' + queueGlobalStatus = true + queueStatInterval = '3 min' + submitRateLimit = '100/1min' + maxForks = 1000 + queueSize = 1000 + exitReadTimeout = '600 sec' + } } freesurfer { @@ -318,4 +323,4 @@ profiles { params.bet_anat_f = 0.1 } -} \ No newline at end of file +} From b4a7994a3d8c67493eb1ae17800b95ef0b1c2438 Mon Sep 17 00:00:00 2001 From: Anthony Gagnon Date: Tue, 15 Oct 2024 12:01:00 -0400 Subject: [PATCH 51/54] add synthstrip module for brain extraction --- modules/tracking/processes/preprocess.nf | 44 +++++++++++++++++++-- modules/tracking/workflows/preprocessing.nf | 30 ++++++++++---- 2 files changed, 64 insertions(+), 10 deletions(-) diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index f4d56ad..4ff9870 100644 --- a/modules/tracking/processes/preprocess.nf +++ b/modules/tracking/processes/preprocess.nf @@ -4,13 +4,14 @@ nextflow.enable.dsl=2 process BET_DWI { cpus 1 - memory { 4.GB * task.attempt } + memory { 8.GB * task.attempt } time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) output: tuple val(sid), path("${sid}__dwi_bet.nii.gz"), emit: bet_dwi + tuple val(sid), path("${sid}__powder_avg_bet_mask.nii.gz"), emit: bet_mask when: !params.skip_dwi_preprocessing @@ -39,6 +40,43 @@ process BET_DWI { """ } +process SYNTHSTRIP { + cpus 12 + memory { 16.GB * task.attempt } + time { 6.hour * task.attempt } + + input: + tuple val(sid), path(dwi), path(bval), path(weights) + output: + tuple val(sid), path("${sid}__dwi_bet.nii.gz"), emit: bet_dwi + tuple val(sid), path("${sid}__dwi_bet_mask.nii.gz"), emit: bet_mask + when: + !params.skip_dwi_preprocessing + + script: + def b0_thr = params.b0_thr ? "--b0_thr ${params.b0_thr}" : '' + def shells = params.shells ? "--shells ${params.shells}" : '' + def shell_thr = params.shell_thr ? "--shell_thr ${params.shell_thr}" : '' + + def gpu = params.gpu ? 
"--gpu" : "" + def border = params.border ? "-b " + params.border : "" + def nocsf = params.nocsf ? "--no-csf" : "" + def model = "$weights" ? "--model $weights" : "" + + // ** Using SynthStrip with infant weights on powder average image. ** // + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_powder_average.py $dwi $bval ${sid}__powder_avg.nii.gz \ + $b0_thr $shells $shell_thr + mri_synthstrip -i ${sid}__powder_avg.nii.gz --out image_bet.nii.gz\ + --mask ${sid}__dwi_bet_mask.nii.gz $gpu $border $nocsf $model + mrcalc $dwi ${sid}__dwi_bet_mask.nii.gz -mult ${sid}__dwi_bet.nii.gz\ + -quiet -force -nthreads 1 + """ +} + process BET_T2 { cpus 1 memory { 4.GB * task.attempt } @@ -118,8 +156,8 @@ process TOPUP { process EDDY_TOPUP { cpus params.processes_eddy - memory { 5.GB * task.attempt } - time { 4.hour * task.attempt } + memory { 16.GB * task.attempt } + time { 16.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0s_corrected), path(field), path(movpar) diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 07d6555..f73884a 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -4,6 +4,7 @@ nextflow.enable.dsl=2 include { BET_DWI; + SYNTHSTRIP; BET_T2; BET_T1; DENOISING; @@ -29,16 +30,15 @@ workflow DWI { main: - // ** Bet ** // - BET_DWI(dwi_channel) - // ** Denoising ** // - DENOISING(BET_DWI.out) + denoising_channel = dwi_channel + .map{[it[0], it[1]]} + DENOISING(denoising_channel) // ** Topup ** // topup_channel = dwi_channel .map{[it[0], it[2], it[3]]} - .combine(DENOISING.out, by: 0) + .combine(DENOISING.out.denoised_dwi, by: 0) .combine(rev_channel, by: 0) .map{ sid, bvals, bvecs, dwi, rev -> tuple(sid, dwi, bvals, bvecs, rev)} TOPUP(topup_channel) @@ -52,9 +52,25 @@ workflow DWI { field, movpar)} EDDY_TOPUP(eddy_channel) + // ** 
Bet ** // + if ( params.run_synthbet ) { + weights_ch = Channel.fromPath(params.weights) + synthstrip_channel = EDDY_TOPUP.out.dwi_bval_bvec + .map{ [it[0], it[1], it[2]] } + .combine(weights_ch) + SYNTHSTRIP(synthstrip_channel) + dwi = SYNTHSTRIP.out.bet_dwi + mask = SYNTHSTRIP.out.bet_mask + } else { + BET_DWI(EDDY_TOPUP.out.dwi_bval_bvec) + dwi = BET_DWI.out.bet_dwi + mask = BET_DWI.out.bet_mask + } + // ** N4 ** // - n4_channel = EDDY_TOPUP.out.dwi_bval_bvec - .combine(EDDY_TOPUP.out.b0_mask, by: 0) + n4_channel = dwi.combine(EDDY_TOPUP.out.dwi_bval_bvec, by: 0) + .map{ [it[0], it[1], it[3], it[4]] } + .combine(mask, by: 0) N4(n4_channel) // ** Crop ** // From a0330d2190b008be8d6000cd547e29cf352b8a85 Mon Sep 17 00:00:00 2001 From: Anthony Gagnon Date: Tue, 15 Oct 2024 12:01:28 -0400 Subject: [PATCH 52/54] atlas folder as input and fix priors module --- main.nf | 14 ++++++-- modules/connectomics/processes/commit.nf | 15 +++++++- .../connectomics/workflows/connectomics.nf | 2 +- modules/freesurfer/processes/atlases.nf | 26 +++++++------- .../freesurfer/workflows/freesurferflow.nf | 12 ++++--- nextflow.config | 36 ++++++++++++++----- 6 files changed, 75 insertions(+), 30 deletions(-) diff --git a/main.nf b/main.nf index 20cebc0..8e54e90 100644 --- a/main.nf +++ b/main.nf @@ -50,10 +50,12 @@ workflow { if ( params.run_freesurfer ) { data = get_data_freesurfer() - FREESURFERFLOW(data.anat) + utils = Channel.fromPath(params.atlas_utils_folder) + FREESURFERFLOW(data.anat, utils) + } - if ( params.priors ) { + if ( params.priors && !params.run_tracking ) { data = get_data_tracking() @@ -116,6 +118,14 @@ workflow { fa_channel) } + if ( params.priors && params.run_tracking ) { + + data = get_data_tracking() + + PRIORS(DWI.out.dwi_bval_bvec) + + } + if ( params.run_connectomics && params.run_tracking ) { // ** Fetch tracking data ** // tracking = TRACKING.out.trk diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index 
de61c31..9af904b 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -93,7 +93,7 @@ process COMPUTE_PRIORS { output: tuple val("Priors"), path("${sid}__para_diff.txt"), emit: para_diff tuple val("Priors"), path("${sid}__iso_diff.txt"), emit: iso_diff - tuple val("Priors"), path("${sid}__perp_diff.txt"), emit: perp_diff + tuple val("Priors"), path("${sid}__perp_diff.txt"), emit: perp_diff, optional: true tuple val(sid), path("${sid}__mask_1fiber.nii.gz"), emit: mask_1fiber tuple val(sid), path("${sid}__mask_ventricles.nii.gz"), emit: mask_ventricles @@ -101,6 +101,7 @@ process COMPUTE_PRIORS { params.run_commit && params.compute_priors script: + if ( params.multishell ) { """ scil_compute_NODDI_priors.py $fa $ad $rd $md \ --out_txt_1fiber_para ${sid}__para_diff.txt \ @@ -111,6 +112,18 @@ process COMPUTE_PRIORS { --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors \ --md_min $params.md_min_priors --roi_radius $params.roi_radius_priors """ + } + else { + """ + scil_compute_NODDI_priors.py $fa $ad $md \ + --out_txt_1fiber ${sid}__para_diff.txt \ + --out_txt_ventricles ${sid}__iso_diff.txt \ + --out_mask_1fiber ${sid}__mask_1fiber.nii.gz \ + --out_mask_ventricles ${sid}__mask_ventricles.nii.gz \ + --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors \ + --md_min $params.md_min_priors --roi_radius $params.roi_radius_priors + """ + } } process AVERAGE_PRIORS { diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index 75cabe3..cb9e650 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -49,7 +49,7 @@ workflow CONNECTOMICS { // ** If -profile infant is used, first part will be run. COMMIT1 is the only supported ** // // ** method as of now, since running commit2 requires a decomposition first, which is not an ** // // ** easy task on infant data. This will be improved in the future. 
** // - if ( params.infant_config ) { + if ( params.commit_on_trk ) { // ** COMMIT1 processing on trk ** // commit_channel = tracking_channel diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf index fd85a14..5db6e9b 100644 --- a/modules/freesurfer/processes/atlases.nf +++ b/modules/freesurfer/processes/atlases.nf @@ -8,7 +8,7 @@ process FS_BN_GL_SF { time { 4.hour * task.attempt } input: - tuple val(sid), path(folder) + tuple val(sid), path(folder), path(utils) output: tuple val(sid), path("*freesurfer_v5.nii.gz"), emit: freesurfer @@ -31,8 +31,8 @@ process FS_BN_GL_SF { script: """ - ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ - bash $params.atlas_utils_folder/freesurfer_utils/generate_atlas_FS_BN_GL_SF_v5.sh \$(dirname ${folder}) \ + ln -s $utils/fsaverage \$(dirname ${folder})/ + bash $utils/freesurfer_utils/generate_atlas_FS_BN_GL_SF_v5.sh \$(dirname ${folder}) \ ${sid} ${params.nb_threads} FS_BN_GL_SF_Atlas/ cp $sid/FS_BN_GL_SF_Atlas/* ./ """ @@ -44,7 +44,7 @@ process BN_CHILD { time { 2.hour * task.attempt } input: - tuple val(sid), path(folder) + tuple val(sid), path(folder), path(utils) output: tuple val(sid), path("*brainnetome_child_v1.nii.gz"), emit: brainnetome_child @@ -58,8 +58,8 @@ process BN_CHILD { script: """ - ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ - bash $params.atlas_utils_folder/freesurfer_utils/generate_atlas_BN_child.sh \$(dirname ${folder}) \ + ln -s $utils/fsaverage \$(dirname ${folder})/ + bash $utils/freesurfer_utils/generate_atlas_BN_child.sh \$(dirname ${folder}) \ ${sid} ${params.nb_threads} Child_Atlas/ cp $sid/Child_Atlas/* ./ """ @@ -71,7 +71,7 @@ process LOBES { time { 1.hour * task.attempt } input: - tuple val(sid), path(folder) + tuple val(sid), path(folder), path(utils) output: path("*lobes*.nii.gz"), emit: lobes @@ -105,7 +105,7 @@ process LOBES { 51 52 53 54 58 60 --volume_ids wmparc.nii.gz 47 --volume_ids wmparc.nii.gz 16 --merge 
scil_dilate_labels.py atlas_lobes_v5.nii.gz atlas_lobes_v5_dilate.nii.gz --distance 2 \ --labels_to_dilate 1 2 3 4 5 6 8 9 10 11 12 14 15 --mask brain_mask.nii.gz - cp $params.atlas_utils_folder/freesurfer_utils/*lobes_v5* ./ + cp $utils/freesurfer_utils/*lobes_v5* ./ """ } @@ -115,7 +115,7 @@ process LAUSANNE { time { 4.hour * task.attempt } input: - tuple val(sid), path(folder) + tuple val(sid), path(folder), path(utils) each scale output: @@ -132,9 +132,9 @@ process LAUSANNE { script: """ - ln -s $params.atlas_utils_folder/fsaverage \$(dirname ${folder})/ + ln -s $utils/fsaverage \$(dirname ${folder})/ freesurfer_home=\$(dirname \$(dirname \$(which mri_label2vol))) - /usr/bin/python $params.atlas_utils_folder/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ + /usr/bin/python $utils/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ \$(dirname ${folder}) ${sid} \$freesurfer_home --scale ${scale} --dilation_factor 0 --log_level DEBUG mri_convert ${folder}/mri/rawavg.mgz rawavg.nii.gz @@ -146,7 +146,7 @@ process LAUSANNE { scil_dilate_labels.py lausanne_2008_scale_${scale}.nii.gz lausanne_2008_scale_${scale}_dilate.nii.gz \ --distance 2 --mask mask.nii.gz - cp $params.atlas_utils_folder/lausanne_multi_scale_atlas/*.txt ./ - cp $params.atlas_utils_folder/lausanne_multi_scale_atlas/*.json ./ + cp $utils/lausanne_multi_scale_atlas/*.txt ./ + cp $utils/lausanne_multi_scale_atlas/*.json ./ """ } diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf index e5d1b12..b46583c 100644 --- a/modules/freesurfer/workflows/freesurferflow.nf +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -16,6 +16,7 @@ include { workflow FREESURFERFLOW { take: anat + utils main: @@ -31,18 +32,21 @@ workflow FREESURFERFLOW { t1 = RECON_SURF.out.final_t1 } + // ** Combining atlases and utils channels ** // + atlases_channel = folder_channel.combine(utils) + // ** Computing FS_BN_GL_SF atlases ** // - 
FS_BN_GL_SF(folder_channel) + FS_BN_GL_SF(atlases_channel) // ** Computing BN_CHILD Atlas ** // - BN_CHILD(folder_channel) + BN_CHILD(atlases_channel) // ** Computing lobes atlases ** // - LOBES(folder_channel) + LOBES(atlases_channel) // ** Computing lausanne atlas ** // scales = Channel.from(1,2,3,4,5) - LAUSANNE(folder_channel, + LAUSANNE(atlases_channel, scales) // ** Reorganizing Lausanne multiscale atlas channel ** // diff --git a/nextflow.config b/nextflow.config index 96635d9..d55ccd2 100644 --- a/nextflow.config +++ b/nextflow.config @@ -27,6 +27,16 @@ params { initial_bet_f = 0.16 final_bet_f = 0.16 + //** SYNTHSTRIP Options **// + run_synthbet = false + b0_thr = 50 + shells = "0 1500" + shell_thr = 50 + gpu = false + border = false + nocsf = false + weights = "code/synthstrip.infant.1.pt" + //** BET ANAT Options **// run_bet_anat = false bet_anat_f = 0.16 @@ -132,7 +142,7 @@ params { processes_denoise_dwi = 4 processes_denoise_t1 = 4 processes_bet_t1 = 4 - processes_eddy = 1 + processes_eddy = 4 processes_registration = 4 processes_fodf = 4 @@ -151,8 +161,9 @@ params { compute_priors = false fa_min_priors = 0.7 fa_max_priors = 0.1 - md_min_priors = 0.003 + md_min_priors = 0.002 roi_radius_priors = 20 + multishell = false //** COMMIT Options **// run_commit = true @@ -253,11 +264,11 @@ profiles { executor { pollInterval = '180 sec' queueGlobalStatus = true - queueStatInterval = '3 min' - submitRateLimit = '100/1min' - maxForks = 1000 - queueSize = 1000 - exitReadTimeout = '600 sec' + queueStatInterval = '3 min' + submitRateLimit = '100/1min' + maxForks = 1000 + queueSize = 1000 + exitReadTimeout = '600 sec' } } @@ -276,6 +287,10 @@ profiles { priors { params.priors = true + params.run_commit = true + params.compute_priors = true + params.set_frf = false + params.mean_frf = true } infant { @@ -286,6 +301,9 @@ profiles { params.initial_bet_f = 0.5 params.final_bet_f = 0.35 + //** SYNTHSTRIP Options **// + params.run_synthbet = true + //** NORMALIZE Options 
**// params.fa_mask_threshold = 0.10 @@ -309,8 +327,8 @@ profiles { // ** COMMIT Options ** // params.run_commit = true - params.use_commit2 = false - params.commit_on_trk = true + params.use_commit2 = true + // params.commit_on_trk = true params.para_diff = "1.2E-3" params.iso_diff = "2.0E-3" From fdf5c136981920ef3b59aad4154aa6ae0187b55e Mon Sep 17 00:00:00 2001 From: Anthony Gagnon Date: Tue, 15 Oct 2024 13:06:13 -0400 Subject: [PATCH 53/54] fix usages, gitignore + better handling of weights for synthstrip --- .gitignore | 5 +++++ USAGE | 6 +++--- main.nf | 7 +++++++ modules/connectomics/USAGE_ALL | 11 +++++++++++ modules/connectomics/USAGE_TRACKING | 11 +++++++++++ modules/connectomics/USAGE_TRACKING_INFANT | 11 +++++++++++ modules/tracking/USAGE | 11 +++++++++++ modules/tracking/USAGE_INFANT | 11 +++++++++++ modules/tracking/workflows/preprocessing.nf | 7 ++++++- nextflow.config | 3 +-- 10 files changed, 77 insertions(+), 6 deletions(-) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..351a4e3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +.devcontainer +.vscode +.nextflow* +.DS_Store +*.code-workspace \ No newline at end of file diff --git a/USAGE b/USAGE index 0e111d4..7158c3d 100644 --- a/USAGE +++ b/USAGE @@ -32,9 +32,9 @@ Here is a list of available profiles: local modelling and tractography (see [1] for details). 2. connectomics : If selected, labels registration, tractogram segmentation and connectivity will be performed. - 3. freesurfer : If selected, FreeSurfer Recon-all will be run on input T1s and label files will be - generated (available atlases: freesurfer, brainnetome and glasser). Only available - if T1 volume is supplied as input (therefore, not with -profile infant). + 3. freesurfer : If selected, FreeSurfer Recon-all (or FastSurfer) will be run on input T1s and label files will be + generated (available atlases: freesurfer's atlases, Brainnetome, Brainnetome Child and Glasser). 
+ Only available if T1 volume is supplied as input (therefore, not with -profile infant). 4. infant : If selected, the pipeline will assume the data is from infant patients (< 2 years old) and adapt some parameters to perform tractography and connectomics. diff --git a/main.nf b/main.nf index 8e54e90..2d7bd85 100644 --- a/main.nf +++ b/main.nf @@ -263,6 +263,13 @@ def display_usage () { "eddy_cmd":"$params.eddy_cmd", "topup_bet_f":"$params.topup_bet_f", "use_slice_drop_correction":"$params.use_slice_drop_correction", + "run_synthbet":"$params.run_synthbet", + "shells":"$params.shells", + "shell_thr":"$params.shell_thr", + "gpu":"$params.gpu", + "border":"$params.border", + "nocsf":"$params.nocsf", + "weights":"$params.weights", "dwi_shell_tolerance":"$params.dwi_shell_tolerance", "fa_mask_threshold":"$params.fa_mask_threshold", "anat_resolution":"$params.anat_resolution", diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL index b774ab5..bdb1ef8 100644 --- a/modules/connectomics/USAGE_ALL +++ b/modules/connectomics/USAGE_ALL @@ -67,6 +67,17 @@ OPTIONAL ARGUMENTS (current value) --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + NORMALIZATION OPTIONS --fa_mask_threshold Threshold to use when creating the fa mask for normalization. 
($fa_mask_threshold) diff --git a/modules/connectomics/USAGE_TRACKING b/modules/connectomics/USAGE_TRACKING index 14f8253..099a338 100644 --- a/modules/connectomics/USAGE_TRACKING +++ b/modules/connectomics/USAGE_TRACKING @@ -73,6 +73,17 @@ OPTIONAL ARGUMENTS (current value) --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + NORMALIZATION OPTIONS --fa_mask_threshold Threshold to use when creating the fa mask for normalization. ($fa_mask_threshold) diff --git a/modules/connectomics/USAGE_TRACKING_INFANT b/modules/connectomics/USAGE_TRACKING_INFANT index 925a059..7a96fd1 100644 --- a/modules/connectomics/USAGE_TRACKING_INFANT +++ b/modules/connectomics/USAGE_TRACKING_INFANT @@ -77,6 +77,17 @@ OPTIONAL ARGUMENTS (current value) --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + NORMALIZATION OPTIONS --fa_mask_threshold Threshold to use when creating the fa mask for normalization. 
($fa_mask_threshold) diff --git a/modules/tracking/USAGE b/modules/tracking/USAGE index f91c7d1..2ef2e77 100644 --- a/modules/tracking/USAGE +++ b/modules/tracking/USAGE @@ -67,6 +67,17 @@ OPTIONAL ARGUMENTS (current value) --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + NORMALIZATION OPTIONS --fa_mask_threshold Threshold to use when creating the fa mask for normalization. ($fa_mask_threshold) diff --git a/modules/tracking/USAGE_INFANT b/modules/tracking/USAGE_INFANT index fee0931..3041c8e 100644 --- a/modules/tracking/USAGE_INFANT +++ b/modules/tracking/USAGE_INFANT @@ -71,6 +71,17 @@ OPTIONAL ARGUMENTS (current value) --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + NORMALIZATION OPTIONS --fa_mask_threshold Threshold to use when creating the fa mask for normalization. 
($fa_mask_threshold) diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index f73884a..1b191d8 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -54,10 +54,15 @@ workflow DWI { // ** Bet ** // if ( params.run_synthbet ) { - weights_ch = Channel.fromPath(params.weights) + if ( params.weights ) { + weights_ch = Channel.fromPath(params.weights) + } else { + weights_ch = null + } synthstrip_channel = EDDY_TOPUP.out.dwi_bval_bvec .map{ [it[0], it[1], it[2]] } .combine(weights_ch) + .map{ it[0..2] + [it[3] ?: []] } SYNTHSTRIP(synthstrip_channel) dwi = SYNTHSTRIP.out.bet_dwi mask = SYNTHSTRIP.out.bet_mask diff --git a/nextflow.config b/nextflow.config index d55ccd2..02be61d 100644 --- a/nextflow.config +++ b/nextflow.config @@ -29,13 +29,12 @@ params { //** SYNTHSTRIP Options **// run_synthbet = false - b0_thr = 50 shells = "0 1500" shell_thr = 50 gpu = false border = false nocsf = false - weights = "code/synthstrip.infant.1.pt" + weights = false //** BET ANAT Options **// run_bet_anat = false From f8c02ea29d6f9473d1906c9ab5d6f4ba0c004c68 Mon Sep 17 00:00:00 2001 From: Anthony Gagnon Date: Tue, 15 Oct 2024 13:23:24 -0400 Subject: [PATCH 54/54] replace null value for empty list --- modules/tracking/workflows/preprocessing.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 1b191d8..b418179 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -57,7 +57,7 @@ workflow DWI { if ( params.weights ) { weights_ch = Channel.fromPath(params.weights) } else { - weights_ch = null + weights_ch = [] } synthstrip_channel = EDDY_TOPUP.out.dwi_bval_bvec .map{ [it[0], it[1], it[2]] }