From d4218d15c4c74373ef0146fe517d8855709a2961 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 2 Jun 2022 13:47:58 -0400 Subject: [PATCH 1/8] switch to tractoflow 2.3.0 --- source/conf.py | 2 +- source/data/install.sh | 8 ++++---- source/index.rst | 5 ++--- source/installation/install.rst | 20 +++++++++++++------- source/installation/requirements.rst | 8 ++++---- source/pipeline/launch.rst | 8 ++++---- source/pipeline/options.rst | 11 +++++++++++ source/pipeline/profiles.rst | 13 ++++++++++++- source/reference/changelog.rst | 2 +- source/reference/github.rst | 4 ++-- 10 files changed, 54 insertions(+), 27 deletions(-) diff --git a/source/conf.py b/source/conf.py index 8a992ed..43e34dc 100644 --- a/source/conf.py +++ b/source/conf.py @@ -20,7 +20,7 @@ # -- Project information ----------------------------------------------------- project = u'TractoFlow-documentation' -copyright = u'2021, SCIL' +copyright = u'2022, SCIL' author = u'SCIL' # The short X.Y version diff --git a/source/data/install.sh b/source/data/install.sh index 04915b5..80b1442 100644 --- a/source/data/install.sh +++ b/source/data/install.sh @@ -1,10 +1,10 @@ #!/bin/bash echo "Downloading TractoFlow..." -wget -q https://github.com/scilus/tractoflow/releases/download/2.2.1/tractoflow-2.2.1.zip -unzip -q tractoflow-2.2.1.zip -rm -rf tractoflow-2.2.1.zip +wget -q https://github.com/scilus/tractoflow/releases/download/2.3.0/tractoflow-2.3.0.zip +unzip -q tractoflow-2.3.0.zip +rm -rf tractoflow-2.3.0.zip echo "Done: TractoFlow downloaded." echo "Downloading TractoFlow Singularity..." -wget -q --show-progress http://scil.dinf.usherbrooke.ca/containers_list/tractoflow_2.2.1_b9a527_2021-04-13.sif +wget -q --show-progress http://scil.dinf.usherbrooke.ca/containers_list/scilus_1.3.0.sif echo "Done: TractoFlow Singularity downloaded." diff --git a/source/index.rst b/source/index.rst index 8a3ab31..29d0f3d 100644 --- a/source/index.rst +++ b/source/index.rst @@ -2,7 +2,7 @@ Welcome to the TractoFlow user documentation! ============================================= .. note:: - New release available: 2.2.1. + New release available: 2.3.0. TractoFlow now support BIDS as input data. TractoFlow pipeline is developed by the Sherbrooke Connectivity Imaging Lab (`SCIL`_) @@ -27,8 +27,7 @@ For MacOS users, please see this section :ref:`docker-tractoflow` for setup. For any issues or difficulties with TractoFlow, please use our Neurostar tag: https://neurostars.org/tag/tractoflow .. tip:: - If you want to analyse datasets with white-matter lesions, we highly recommends - to use our devrived version of TractoFlow: TractoFlow Atlas based Segmentation (ABS) https://github.com/scilus/TractoFlow-ABS + If you want to analyse datasets with white-matter lesions check the tractoflow profile ABS. .. 
toctree:: :maxdepth: 1 diff --git a/source/installation/install.rst b/source/installation/install.rst index d5429f5..d1bf9c5 100644 --- a/source/installation/install.rst +++ b/source/installation/install.rst @@ -7,7 +7,7 @@ Easy install method Enter this command in your terminal (it downloads the container and TractoFlow code in the current directory): :: - curl -s https://tractoflow-documentation.readthedocs.io/en/2.2.1/install.sh | bash + curl -s https://tractoflow-documentation.readthedocs.io/en/2.3.0/install.sh | bash TractoFlow pipeline ------------------- @@ -19,7 +19,7 @@ Download the last release of TractoFlow pipeline: :: - $> wget https://github.com/scilus/tractoflow/releases/download/2.2.1/tractoflow-2.2.1.zip && unzip tractoflow-2.2.1.zip + $> wget https://github.com/scilus/tractoflow/releases/download/2.3.0/tractoflow-2.3.0.zip && unzip tractoflow-2.3.0.zip For developers ############## @@ -47,7 +47,13 @@ Download the last release of the Singularity container for TractoFlow: :: - $> wget http://scil.dinf.usherbrooke.ca/containers_list/tractoflow_2.2.1_b9a527_2021-04-13.sif + $> wget http://scil.dinf.usherbrooke.ca/containers_list/scilus_1.3.0.sif + +Or if you have sudo privileges + +:: + + $> sudo singularity build scilus_1.3.0.sif docker://scilus/scilus:1.3.0 For developers ############## @@ -57,16 +63,16 @@ Clone the singularity repository for TractoFlow pipeline: :: # Clone with HTTPS - $> git clone https://github.com/scilus/containers-tractoflow.git + $> git clone https://github.com/scilus/containers-scilus.git # Clone with SSH - $> git clone git@github.com:scilus/containers-tractoflow.git + $> git clone git@github.com:scilus/containers-scilus.git Then, you can build the singularity image: :: - $> singularity build singularity_name.img singularity_tractoflow.def + $> singularity build singularity_name.sif singularity_tractoflow.def .. _docker-tractoflow: @@ -80,6 +86,6 @@ Download the last release of the Docker container for TractoFlow: :: - $> docker pull scilus/tractoflow:2.2.1 + $> docker pull scilus/scilus:1.3.0 Please see :ref:`profiles` section to use `macos` profile. diff --git a/source/installation/requirements.rst b/source/installation/requirements.rst index a63ed8b..53f4a06 100644 --- a/source/installation/requirements.rst +++ b/source/installation/requirements.rst @@ -17,7 +17,7 @@ Local Computer :: - $> wget https://github.com/nextflow-io/nextflow/releases/download/v19.04.0/nextflow && chmod +x nextflow && \ + $> wget https://github.com/nextflow-io/nextflow/releases/download/v21.10.6/nextflow && chmod +x nextflow && \ echo 'export PATH=$PATH:'$(pwd) >> ~/.bash_profile && source ~/.bash_profile High Performance computer (HPC) @@ -28,8 +28,8 @@ High Performance computer (HPC) :: - $> wget https://github.com/nextflow-io/nextflow/releases/download/v19.04.0/nextflow-19.04.0-all && \ - mv nextflow-19.04.0-all nextflow && \ + $> wget https://github.com/nextflow-io/nextflow/releases/download/v21.10.6/nextflow-21.10.6-all && \ + mv nextflow-21.10.6-all nextflow && \ chmod +x nextflow && echo 'export PATH=$PATH:'$(pwd) >> ~/.bash_profile && source ~/.bash_profile Singularity @@ -56,7 +56,7 @@ If you are Debian/Ubuntu, you can get neurodebian: High Performance computer (HPC) ############################### -Please try ``module load singularity/3.5`` or check with an administrator or on the HPC website. +Please try ``module load singularity/3.7`` or check with an administrator or on the HPC website. 
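To confirm that the requirements are in place before going further, a quick check such as the following can be used (the version numbers are simply the ones referenced above; newer releases should also work)::

    $> nextflow -version      # expecting 21.10.6 or later
    $> singularity --version  # expecting 3.7 or later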
Docker ------ diff --git a/source/pipeline/launch.rst b/source/pipeline/launch.rst index d73fda4..f40d64f 100644 --- a/source/pipeline/launch.rst +++ b/source/pipeline/launch.rst @@ -9,9 +9,9 @@ To run the pipeline, use the following command: :: # With Singularity - $> nextflow run tractoflow/main.nf --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.img -resume + $> nextflow run tractoflow/main.nf --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # Or - $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.img -resume + $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # With Docker $> nextflow run tractoflow/main.nf --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume @@ -42,7 +42,7 @@ to be executed with ``sbatch``. #SBATCH --mem=0 #SBATCH --time=48:00:00 - nextflow -c singularity.conf run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.img -resume + nextflow -c singularity.conf run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume To launch on multiple nodes, you must to use the MPI option that use Ignite executor. The following example use 2 nodes with 32 threads on each nodes. The follwing lines @@ -59,7 +59,7 @@ must be saved in ``.sh`` file (e.g. ``cmd.sh``) to be executed with ``sbatch``. export NXF_CLUSTER_SEED=$(shuf -i 0-16777216 -n 1) - srun nextflow -c singularity.conf run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.img -with-mpi -resume + srun nextflow -c singularity.conf run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume To launch the pipeline on the HPC: diff --git a/source/pipeline/options.rst b/source/pipeline/options.rst index 4d862af..b9179a7 100644 --- a/source/pipeline/options.rst +++ b/source/pipeline/options.rst @@ -4,6 +4,17 @@ Options To display the options of Tractoflow, please use ``nextflow run tractoflow/main.nf --help``. +Mandatory arguments +------------ + +``--dti_shells dti_shells`` + Shells selected to compute the dti metrics (generally b < 1200). + Please write them between quotes e.g. (--dti_shells "0 300"). + +``--fodf_shells fodf_shells`` + Shells selected to compute the fodf metrics (generally b > 700). + Please write them between quotes e.g. (--fodf_shells "0 1000"). + Optional BIDS arguments ------------ diff --git a/source/pipeline/profiles.rst b/source/pipeline/profiles.rst index af7bdae..ad06f57 100644 --- a/source/pipeline/profiles.rst +++ b/source/pipeline/profiles.rst @@ -7,7 +7,7 @@ To select one or multiple profiles, please use the ``-profile`` option. 
For exam :: - $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -profile macos,fully_reproducible -with-singularity singularity_name.img -resume + $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume Profiles available ------------------ @@ -25,3 +25,14 @@ Profiles available ``cbrain`` When this profile is used, Nextflow will copy all the output files in publishDir and not use symlinks. + +``ABS`` + When this profile is used, TractoFlow-ABS (Atlas Based Segmentation) is used. + This profile must be used for pathological data. + The aparc+aseg.nii.gz and wmparc.nii.gz must be in the same space as t1.nii.gz. + +``bundling`` + When this profile is used, it will activate custom tracking parameters to improve RecoBundles results. + +``connectomics`` + When this profile is used, it will activate custom tracking parameters to improve connectomics analysis. diff --git a/source/reference/changelog.rst b/source/reference/changelog.rst index 8184d4b..088209c 100644 --- a/source/reference/changelog.rst +++ b/source/reference/changelog.rst @@ -1,7 +1,7 @@ Changelog ========= -2.2.1 +2.3.0 ######### Date: 09 April 2021 diff --git a/source/reference/github.rst b/source/reference/github.rst index c91c334..36f9192 100644 --- a/source/reference/github.rst +++ b/source/reference/github.rst @@ -3,7 +3,7 @@ Github repositories TractoFlow pipeline repository: `TractoFlow`_ -TractoFlow Containers repository: `Containers-TractoFlow`_ +TractoFlow Containers repository: `Containers-Scilus`_ .. _TractoFlow: https://github.com/scilus/tractoflow -.. _Containers-TractoFlow: https://github.com/scilus/containers-tractoflow +.. _Containers-Scilus: https://github.com/scilus/containers-scilus From eac8f76db91d42b3a4fb1efc420b2de1c8987577 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 2 Jun 2022 14:04:20 -0400 Subject: [PATCH 2/8] new nextflow commands --- source/data/install.sh | 4 +--- source/pipeline/launch.rst | 12 ++++++------ source/pipeline/options.rst | 2 +- source/pipeline/profiles.rst | 2 +- 4 files changed, 9 insertions(+), 11 deletions(-) diff --git a/source/data/install.sh b/source/data/install.sh index 80b1442..958c446 100644 --- a/source/data/install.sh +++ b/source/data/install.sh @@ -1,9 +1,7 @@ #!/bin/bash echo "Downloading TractoFlow..." -wget -q https://github.com/scilus/tractoflow/releases/download/2.3.0/tractoflow-2.3.0.zip -unzip -q tractoflow-2.3.0.zip -rm -rf tractoflow-2.3.0.zip +nextflow pull scilus/tractoflow echo "Done: TractoFlow downloaded." echo "Downloading TractoFlow Singularity..."
wget -q --show-progress http://scil.dinf.usherbrooke.ca/containers_list/scilus_1.3.0.sif diff --git a/source/pipeline/launch.rst b/source/pipeline/launch.rst index f40d64f..e2060b7 100644 --- a/source/pipeline/launch.rst +++ b/source/pipeline/launch.rst @@ -9,14 +9,14 @@ To run the pipeline, use the following command: :: # With Singularity - $> nextflow run tractoflow/main.nf --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.3.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # Or - $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # With Docker - $> nextflow run tractoflow/main.nf --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume + $> nextflow run tractoflow -r 2.3.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume # Or - $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume + $> nextflow run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume Where ``DTI_SHELLS`` are the shells used to compute the DTI metrics (typically b-value < 1200 e.g. "0 1000") and ``FODF_SHELLS`` are the shells used @@ -42,7 +42,7 @@ to be executed with ``sbatch``. #SBATCH --mem=0 #SBATCH --time=48:00:00 - nextflow -c singularity.conf run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + nextflow -c singularity.conf run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume To launch on multiple nodes, you must to use the MPI option that use Ignite executor. The following example use 2 nodes with 32 threads on each nodes. The follwing lines @@ -59,7 +59,7 @@ must be saved in ``.sh`` file (e.g. ``cmd.sh``) to be executed with ``sbatch``. export NXF_CLUSTER_SEED=$(shuf -i 0-16777216 -n 1) - srun nextflow -c singularity.conf run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume + srun nextflow -c singularity.conf run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume To launch the pipeline on the HPC: diff --git a/source/pipeline/options.rst b/source/pipeline/options.rst index b9179a7..1ee8882 100644 --- a/source/pipeline/options.rst +++ b/source/pipeline/options.rst @@ -2,7 +2,7 @@ Options ======= To display the options of Tractoflow, please use -``nextflow run tractoflow/main.nf --help``. +``nextflow run tractoflow -r 2.3.0 --help``. 
Mandatory arguments ------------ diff --git a/source/pipeline/profiles.rst b/source/pipeline/profiles.rst index ad06f57..d6b0c3f 100644 --- a/source/pipeline/profiles.rst +++ b/source/pipeline/profiles.rst @@ -7,7 +7,7 @@ To select one or multiple profiles, please use the ``-profile`` option. For exam :: - $> nextflow run tractoflow/main.nf --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume Profiles available ------------------ From f6ac6594fcc020c7f96d39ebdc770434d01d22f8 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 2 Jun 2022 14:22:36 -0400 Subject: [PATCH 3/8] update installation --- source/installation/install.rst | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/source/installation/install.rst b/source/installation/install.rst index d1bf9c5..5749a55 100644 --- a/source/installation/install.rst +++ b/source/installation/install.rst @@ -1,25 +1,29 @@ -Install -======= +Fast Installation +================= Easy install method ------------------- -Enter this command in your terminal (it downloads the container and TractoFlow code in the current directory): +Enter this command in your terminal (it downloads the container and TractoFlow code in the current directory - Make sure nextflow is already installed before running this command): :: curl -s https://tractoflow-documentation.readthedocs.io/en/2.3.0/install.sh | bash + +Detailed Installation +===================== + TractoFlow pipeline ------------------- Release ####### -Download the last release of TractoFlow pipeline: +Download TractoFlow pipeline: :: - $> wget https://github.com/scilus/tractoflow/releases/download/2.3.0/tractoflow-2.3.0.zip && unzip tractoflow-2.3.0.zip + $> nextflow pull scilus/tractoflow For developers ############## @@ -34,6 +38,11 @@ Clone TractoFlow pipeline repository: # Clone with SSH $> git clone git@github.com:scilus/tractoflow.git +As a developer you will have to run tractoflow using this command: + +:: + + nextflow run tractoflow/main.nf --help .. _singularity-tractoflow: @@ -72,7 +81,7 @@ Then, you can build the singularity image: :: - $> singularity build singularity_name.sif singularity_tractoflow.def + $> singularity build singularity_name.sif singularity_scilus.def .. _docker-tractoflow: From 6611e2d92afc30d09c196ffbf86c35e3ab4a98cd Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 9 Nov 2022 11:44:52 -0500 Subject: [PATCH 4/8] update documentation --- source/data/install.sh | 2 +- source/index.rst | 6 +++--- source/installation/install.rst | 8 ++++---- source/pipeline/options.rst | 16 ++++------------ source/pipeline/profiles.rst | 2 +- source/reference/changelog.rst | 20 ++++++++++++++++++++ 6 files changed, 33 insertions(+), 21 deletions(-) diff --git a/source/data/install.sh b/source/data/install.sh index 958c446..60fa30e 100644 --- a/source/data/install.sh +++ b/source/data/install.sh @@ -4,5 +4,5 @@ echo "Downloading TractoFlow..." nextflow pull scilus/tractoflow echo "Done: TractoFlow downloaded." echo "Downloading TractoFlow Singularity..." 
-wget -q --show-progress http://scil.dinf.usherbrooke.ca/containers_list/scilus_1.3.0.sif +wget -q --show-progress http://scil.usherbrooke.ca/containers_list/scilus_1.4.0.sif echo "Done: TractoFlow Singularity downloaded." diff --git a/source/index.rst b/source/index.rst index 29d0f3d..224cbc9 100644 --- a/source/index.rst +++ b/source/index.rst @@ -2,8 +2,7 @@ Welcome to the TractoFlow user documentation! ============================================= .. note:: - New release available: 2.3.0. - TractoFlow now support BIDS as input data. + New release available: 2.4.0. TractoFlow pipeline is developed by the Sherbrooke Connectivity Imaging Lab (`SCIL`_) in order to process diffusion MRI dataset from the raw data to the tractography. @@ -27,7 +26,7 @@ For MacOS users, please see this section :ref:`docker-tractoflow` for setup. For any issues or difficulties with TractoFlow, please use our Neurostar tag: https://neurostars.org/tag/tractoflow .. tip:: - If you want to analyse datasets with white-matter lesions check the tractoflow profile ABS. + If you want to analyse datasets with white-matter lesions, use the ABS profile. .. toctree:: :maxdepth: 1 @@ -42,6 +41,7 @@ For any issues or difficulties with TractoFlow, please use our Neurostar tag: ht pipeline/steps pipeline/input + pipeline/options pipeline/profiles pipeline/launch diff --git a/source/installation/install.rst b/source/installation/install.rst index 5749a55..b1d7b29 100644 --- a/source/installation/install.rst +++ b/source/installation/install.rst @@ -7,7 +7,7 @@ Easy install method Enter this command in your terminal (it downloads the container and TractoFlow code in the current directory - Make sure nextflow is already installed before running this command): :: - curl -s https://tractoflow-documentation.readthedocs.io/en/2.3.0/install.sh | bash + curl -s https://tractoflow-documentation.readthedocs.io/en/2.4.0/install.sh | bash Detailed Installation ===================== @@ -56,13 +56,13 @@ Download the last release of the Singularity container for TractoFlow: :: - $> wget http://scil.dinf.usherbrooke.ca/containers_list/scilus_1.3.0.sif + $> wget http://scil.usherbrooke.ca/containers_list/scilus_1.4.0.sif Or if you have sudo privileges :: - $> sudo singularity build scilus_1.3.0.sif docker://scilus/scilus:1.3.0 + $> sudo singularity build scilus_1.4.0.sif docker://scilus/scilus:1.4.0 For developers ############## @@ -95,6 +95,6 @@ Download the last release of the Docker container for TractoFlow: :: - $> docker pull scilus/scilus:1.3.0 + $> docker pull scilus/scilus:1.4.0 Please see :ref:`profiles` section to use `macos` profile. diff --git a/source/pipeline/options.rst b/source/pipeline/options.rst index 1ee8882..4db8efe 100644 --- a/source/pipeline/options.rst +++ b/source/pipeline/options.rst @@ -2,18 +2,7 @@ Options ======= To display the options of Tractoflow, please use -``nextflow run tractoflow -r 2.3.0 --help``. +``nextflow run tractoflow -r 2.4.0 --help``. - -Mandatory arguments ------------- - -``--dti_shells dti_shells`` - Shells selected to compute the dti metrics (generally b < 1200). - Please write them between quotes e.g. (--dti_shells "0 300"). - -``--fodf_shells fodf_shells`` - Shells selected to compute the fodf metrics (generally b > 700). - Please write them between quotes e.g. (--fodf_shells "0 1000"). Optional BIDS arguments ------------ @@ -117,6 +106,9 @@ Options list ``--min_fa MIN_THRESHOLD`` (default: 0.5) Minimum FA threshold to compute the FRF.
+``--min_nvox MIN_NVOX_THRESHOLD`` (default: 300) + Minimum number of voxels to compute the FRF. + ``--roi_radius RADIUS`` (default: 20) Region of interest radius to compute the FRF. This ROI starts from the center of the 3D volume (sizeX/2, sizeY/2, sizeZ/2). diff --git a/source/pipeline/profiles.rst b/source/pipeline/profiles.rst index d6b0c3f..f6bacf4 100644 --- a/source/pipeline/profiles.rst +++ b/source/pipeline/profiles.rst @@ -7,7 +7,7 @@ To select one or multiple profiles, please use the ``-profile`` option. For exam :: - $> nextflow run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --root input_folder -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume Profiles available ------------------ diff --git a/source/reference/changelog.rst b/source/reference/changelog.rst index 088209c..678824f 100644 --- a/source/reference/changelog.rst +++ b/source/reference/changelog.rst @@ -1,7 +1,27 @@ Changelog ========= +2.4.0 +######## +Date: October 2022 + +New features + + 2.3.0 +######## +Date: 05 April 2022 + +New features + - New profile Atlas Based Segmentation (-profile ABS) + - New profile "skip preprocessing" for HCP dataset (-profile skip_preprocessing) + - New profile "bundling" to run tracking spec skip preprocessing for HCP dataset (-profile skip_preprocessing) + - New profile + - Add option to compute dwi sh (-sh_fitting true) + - Gibbs correction (-run_gibbs_correction true) + + +2.2.1 ######### Date: 09 April 2021 From a75bc5d157dc34d214e5aff0cef24bd335903231 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 9 Nov 2022 12:16:00 -0500 Subject: [PATCH 5/8] add ABS input for BIDS and non BIDS input structure --- source/pipeline/input.rst | 15 ++++++++++++--- source/pipeline/options.rst | 25 +++++++++++++++++++++++-- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/source/pipeline/input.rst b/source/pipeline/input.rst index 45d82ed..2f588fc 100644 --- a/source/pipeline/input.rst +++ b/source/pipeline/input.rst @@ -14,6 +14,8 @@ before launching the processing to valide the BIDS format. In the case that some tags or informations are missing, TractoFlow will create a json file in ``results/Read_BIDS``. Please complete missing informations and relaunch the pipeline replacing ``--bids YOUR_BIDS_DATASET`` with ``--bids_config results/Read_BIDS/tractoflow_bids_struct.json``. +If you have a BIDS structure and want to use ``-profile ABS``, you need to use the ``--fs`` option to point to your FreeSurfer output folder. + If you have any problems, contact us on NeuroStar (https://neurostars.org/tag/tractoflow). Root parameter -------------- It is possible not to follow the BIDS format. In that case, the input root parameter is called using ``--root`` and requires the following file structure: :: [root] ├── S1 │ ├── dwi.nii.gz │ ├── bval │ ├── bvec + │ ├── t1.nii.gz │ ├── rev_b0.nii.gz (optional) - │ └── t1.nii.gz + │ ├── aparc+aseg.nii.gz (optional) + │ └── wmparc.nii.gz (optional) └── S2 ├── dwi.nii.gz ├── bval ├── bvec + ├── t1.nii.gz ├── rev_b0.nii.gz (optional) - └── t1.nii.gz + ├── aparc+aseg.nii.gz (optional) + └── wmparc.nii.gz (optional) The `root` folder must contains subjects folders (e.g. `S1`, `S2`,...). Each subject folder contains the required images: * ``dwi.nii.gz`` are the diffusion weighted images. * ``bval`` is the b-value file in the FSL format. * ``bvec`` is the b-vector file in the FSL format.
- * ``rev_b0.nii.gz`` (optional) is the reversed phase encoded b0 image also called blip-up/blip-down. Used to correct distortion due to diffusion acquisition (`Documentation`_). * ``t1.nii.gz`` is the T1 weighted image. + * ``rev_b0.nii.gz`` (optional) is the reversed phase encoded b0 image also called blip-up/blip-down. Used to correct distortion due to diffusion acquisition (`Documentation`_). + * ``aparc+aseg.nii.gz`` (optional) is the FreeSurfer GM segmentation image. + * ``wmparc.nii.gz`` (optional) is the FreeSurfer WM segmentation image. + .. _Documentation: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/topup#topup_-_A_tool_for_estimating_and_correcting_susceptibility_induced_distortions diff --git a/source/pipeline/options.rst b/source/pipeline/options.rst index 4db8efe..608e222 100644 --- a/source/pipeline/options.rst +++ b/source/pipeline/options.rst @@ -18,8 +18,13 @@ Optional BIDS arguments If set, it will remove all the participants that are missing any information. +``--fs "freesurfer_output_folder"`` (default: none) + If you want to run TractoFlow-ABS (Atlas Based Segmentation) combined with a BIDS structure input, + this argument is required. + Options list ------------ + ``--b0_thr_extract_b0 MAX_VALUE`` (default: 10) All b-values below a maximum value are considered b=0 images. @@ -84,6 +89,24 @@ Options list ``--dwi_interpolation METHOD`` (default: lin) Interpolation method [nn, lin, quad, cubic]. +``--max_dti_shell_value`` (default: 1200) + Maximum shell threshold to be considered as a DTI shell (b <= 1200). + This is the default behaviour to select DTI shells. + +``--dti_shells`` + Shells selected to compute the DTI metrics (generally b <= 1200). + Please write them between quotes e.g. (--dti_shells "0 300 1000"). + If set, it will override max_dti_shell_value. + +``--min_fodf_shell_value`` (default: 700) + Minimum shell threshold to be considered as a fODF shell (b >= 700). + This is the default behaviour to select fODF shells. + +``--fodf_shells`` + Shells selected to compute the fODF metrics (generally b >= 700). + Please write them between quotes e.g. (--fodf_shells "0 1000 2000"). + If set, it will override min_fodf_shell_value. + ``--run_t1_denoising BOOL`` (default: true) Run T1 denoising using NLmean algorithm. @@ -241,8 +264,6 @@ Optional Local Tracking arguments [LOCAL] List of random seed numbers for the random number generator. Please write them as list separated using commat WITHOUT SPACE e.g. (--local_random_seed 0,1,2) - - ``--template_t1 PATH`` (default: /human-data/mni_152_sym_09c/t1) Path to the template T1 directory for antsBrainExtraction. The folder must contain t1_template.nii.gz and t1_brain_probability_map.nii.gz.
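As a quick sanity check of the non-BIDS input folder described above, a small shell snippet can report missing files before the pipeline is launched. It is not part of TractoFlow itself and only assumes the ``S1``/``S2`` subject layout shown in ``input.rst``::

    #!/bin/bash
    # Verify that each subject folder contains the files TractoFlow expects.
    input_folder=${1:-input_folder}
    required="dwi.nii.gz bval bvec t1.nii.gz"
    optional="rev_b0.nii.gz aparc+aseg.nii.gz wmparc.nii.gz"
    for subject in "$input_folder"/*/; do
        for f in $required; do
            [ -f "$subject$f" ] || echo "MISSING   $subject$f"
        done
        for f in $optional; do
            [ -f "$subject$f" ] || echo "optional  $subject$f not present"
        done
    done

Saved as ``check_input.sh``, it can be run with ``bash check_input.sh my_input_folder`` before calling Nextflow.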
From 18e1f7ee5ccc5659807a7ea9d5a5bb8e742596fb Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 9 Nov 2022 12:42:51 -0500 Subject: [PATCH 6/8] update to 2.4.0 --- source/pipeline/launch.rst | 12 ++++++------ source/reference/changelog.rst | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/source/pipeline/launch.rst b/source/pipeline/launch.rst index e2060b7..3bf5d09 100644 --- a/source/pipeline/launch.rst +++ b/source/pipeline/launch.rst @@ -9,14 +9,14 @@ To run the pipeline, use the following command: :: # With Singularity - $> nextflow run tractoflow -r 2.3.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # Or - $> nextflow run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # With Docker - $> nextflow run tractoflow -r 2.3.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume + $> nextflow run tractoflow -r 2.4.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume # Or - $> nextflow run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume + $> nextflow run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume Where ``DTI_SHELLS`` are the shells used to compute the DTI metrics (typically b-value < 1200 e.g. "0 1000") and ``FODF_SHELLS`` are the shells used @@ -42,7 +42,7 @@ to be executed with ``sbatch``. #SBATCH --mem=0 #SBATCH --time=48:00:00 - nextflow -c singularity.conf run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + nextflow -c singularity.conf run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume To launch on multiple nodes, you must to use the MPI option that use Ignite executor. The following example use 2 nodes with 32 threads on each nodes. The follwing lines @@ -59,7 +59,7 @@ must be saved in ``.sh`` file (e.g. ``cmd.sh``) to be executed with ``sbatch``. 
export NXF_CLUSTER_SEED=$(shuf -i 0-16777216 -n 1) - srun nextflow -c singularity.conf run tractoflow -r 2.3.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume + srun nextflow -c singularity.conf run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume To launch the pipeline on the HPC: diff --git a/source/reference/changelog.rst b/source/reference/changelog.rst index 678824f..522d080 100644 --- a/source/reference/changelog.rst +++ b/source/reference/changelog.rst @@ -8,7 +8,7 @@ Date: October 2022 New features -2.3.0 +2.4.0 ######## Date: 05 April 2022 From 9f08483d8cb9b9e02a92ed4d14e2017959deb22d Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 10 Nov 2022 14:03:35 -0500 Subject: [PATCH 7/8] answer Charles comments --- source/pipeline/input.rst | 2 +- source/pipeline/launch.rst | 8 ++++---- source/pipeline/profiles.rst | 2 +- source/reference/changelog.rst | 15 +++++++++------ 4 files changed, 15 insertions(+), 12 deletions(-) diff --git a/source/pipeline/input.rst b/source/pipeline/input.rst index 2f588fc..1d414e1 100644 --- a/source/pipeline/input.rst +++ b/source/pipeline/input.rst @@ -22,7 +22,7 @@ Root parameter -------------- It is possible not to follow the BIDS format. In that case, the input root parameter -is called using ``--root`` and requires the following file structure: +is called using ``--input`` and requires the following file structure: :: diff --git a/source/pipeline/launch.rst b/source/pipeline/launch.rst index 3bf5d09..30761c4 100644 --- a/source/pipeline/launch.rst +++ b/source/pipeline/launch.rst @@ -11,12 +11,12 @@ To run the pipeline, use the following command: # With Singularity $> nextflow run tractoflow -r 2.4.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # Or - $> nextflow run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --input input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume # With Docker $> nextflow run tractoflow -r 2.4.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume # Or - $> nextflow run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume + $> nextflow run tractoflow -r 2.4.0 --input input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume Where ``DTI_SHELLS`` are the shells used to compute the DTI metrics (typically b-value < 1200 e.g. "0 1000") and ``FODF_SHELLS`` are the shells used @@ -42,7 +42,7 @@ to be executed with ``sbatch``. #SBATCH --mem=0 #SBATCH --time=48:00:00 - nextflow -c singularity.conf run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + nextflow -c singularity.conf run tractoflow -r 2.4.0 --input input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume To launch on multiple nodes, you must to use the MPI option that use Ignite executor. The following example use 2 nodes with 32 threads on each nodes. 
The follwing lines @@ -59,7 +59,7 @@ must be saved in ``.sh`` file (e.g. ``cmd.sh``) to be executed with ``sbatch``. export NXF_CLUSTER_SEED=$(shuf -i 0-16777216 -n 1) - srun nextflow -c singularity.conf run tractoflow -r 2.4.0 --root input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume + srun nextflow -c singularity.conf run tractoflow -r 2.4.0 --input input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -with-mpi -resume To launch the pipeline on the HPC: diff --git a/source/pipeline/profiles.rst b/source/pipeline/profiles.rst index f6bacf4..1d79fb3 100644 --- a/source/pipeline/profiles.rst +++ b/source/pipeline/profiles.rst @@ -7,7 +7,7 @@ To select one or multiple profiles, please use the ``-profile`` option. For exam :: - $> nextflow run tractoflow -r 2.4.0 --root input_folder -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --input input_folder -profile macos,fully_reproducible -with-singularity singularity_name.sif -resume Profiles available ------------------ diff --git a/source/reference/changelog.rst b/source/reference/changelog.rst index 522d080..86acf7f 100644 --- a/source/reference/changelog.rst +++ b/source/reference/changelog.rst @@ -3,20 +3,23 @@ Changelog 2.4.0 ######## -Date: October 2022 +Date: November 2022 New features - - Automatic extraction of shells when computing DTI and fODF - Skip step bet_prelim_dwi when not needed - Add remove_invalid step in Tracking processes - Add support for complex BIDS structures with multiband acquisition and full reverse encoding acquisitions (only available with the cuda profile) - New profile "bundling". It will activate custom tracking parameters to improve RecoBundles results. Local tracking will be enabled with an FA seeding mask and tracking mask. - New profile "connectomics". It will activate custom tracking parameters to improve connectomics analysis. 2.3.0 ######## Date: 05 April 2022 New features - New profile Atlas Based Segmentation (-profile ABS) - New profile "skip preprocessing" for HCP dataset (-profile skip_preprocessing) - - New profile "bundling" to run tracking spec skip preprocessing for HCP dataset (-profile skip_preprocessing) - - New profile - Add option to compute dwi sh (-sh_fitting true) - Gibbs correction (-run_gibbs_correction true) From aa5ee9f10372e851eae60f1e5b556c709de2f06c Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 16 Nov 2022 13:49:24 -0500 Subject: [PATCH 8/8] fix install and launch --- source/installation/install.rst | 2 +- source/pipeline/launch.rst | 12 ++++-------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/source/installation/install.rst b/source/installation/install.rst index b1d7b29..47ff4c6 100644 --- a/source/installation/install.rst +++ b/source/installation/install.rst @@ -81,7 +81,7 @@ Then, you can build the singularity image: :: - $> singularity build singularity_name.sif singularity_scilus.def + $> singularity build scilus_1.4.0.sif singularity_scilus.def ..
_docker-tractoflow: diff --git a/source/pipeline/launch.rst b/source/pipeline/launch.rst index 30761c4..f2a612d 100644 --- a/source/pipeline/launch.rst +++ b/source/pipeline/launch.rst @@ -9,18 +9,14 @@ To run the pipeline, use the following command: :: # With Singularity - $> nextflow run tractoflow -r 2.4.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --bids input_bids -with-singularity scilus_1.4.0.sif -resume # Or - $> nextflow run tractoflow -r 2.4.0 --input input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-singularity singularity_name.sif -resume + $> nextflow run tractoflow -r 2.4.0 --input input_folder -with-singularity scilus_1.4.0.sif -resume # With Docker - $> nextflow run tractoflow -r 2.4.0 --bids input_bids --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume + $> nextflow run tractoflow -r 2.4.0 --bids input_bids -with-docker scilus/scilus:1.4.0 -resume # Or - $> nextflow run tractoflow -r 2.4.0 --input input_folder --dti_shells "DTI_SHELLS" --fodf_shells "FODF_SHELLS" -with-docker scilus/docker-tractoflow:2.1.1 -resume - -Where ``DTI_SHELLS`` are the shells used to compute the DTI metrics -(typically b-value < 1200 e.g. "0 1000") and ``FODF_SHELLS`` are the shells used -to compute the fODF metrics (typically b > 700 e.g. "0 1000 2000"). + $> nextflow run tractoflow -r 2.4.0 --input input_folder -with-docker scilus/scilus:1.4.0 -resume If you want to skip steps already processed by an anterior run, you can add `-resume` option in the command line.
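For reference, the pieces above can be combined into one end-to-end 2.4.0 run with Singularity. ``input_folder`` is a placeholder, and the explicit ``--dti_shells``/``--fodf_shells`` values (taken from the shell examples earlier in this documentation) are only needed if the automatic shell extraction has to be overridden::

    # Get the pipeline and the container
    $> nextflow pull scilus/tractoflow
    $> wget http://scil.usherbrooke.ca/containers_list/scilus_1.4.0.sif

    # Launch on a non-BIDS dataset
    $> nextflow run tractoflow -r 2.4.0 --input input_folder -with-singularity scilus_1.4.0.sif -resume

    # Same launch with the shell selection forced explicitly
    $> nextflow run tractoflow -r 2.4.0 --input input_folder --dti_shells "0 1000" --fodf_shells "0 1000 2000" -with-singularity scilus_1.4.0.sif -resume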