diff --git a/CHANGES b/CHANGES index 337ae51f42..27c7ae31b7 100644 --- a/CHANGES +++ b/CHANGES @@ -28,6 +28,7 @@ Next release * FIX: Correct linking/copying fallback behavior (https://github.com/nipy/nipype/pull/1391) * ENH: Nipype workflow and interfaces for FreeSurfer's recon-all (https://github.com/nipy/nipype/pull/1326) * FIX: Permit relative path for concatenated_file input to Concatenate() (https://github.com/nipy/nipype/pull/1411) +* ENH: Makes ReconAll workflow backwards compatible with FreeSurfer 5.3.0 (https://github.com/nipy/nipype/pull/1434) Release 0.11.0 (September 15, 2015) ============ diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 2980f147d8..5196292835 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -780,7 +780,7 @@ class SegStatsReconAllInputSpec(SegStatsInputSpec): # implicit ribbon = traits.File(mandatory=True, exists=True, desc="Input file mri/ribbon.mgz") - presurf_seg = File(mandatory=True, exists=True, + presurf_seg = File(exists=True, desc="Input segmentation volume") transform = File(mandatory=True, exists=True, desc="Input transform file") @@ -796,6 +796,8 @@ class SegStatsReconAllInputSpec(SegStatsInputSpec): desc="Input file must be /surf/lh.pial") rh_pial = File(mandatory=True, exists=True, desc="Input file must be /surf/rh.pial") + aseg = File(exists=True, + desc="Mandatory implicit input in 5.3") copy_inputs = traits.Bool(desc="If running as a node, set this to True " + "otherwise, this will copy the implicit inputs " + "to the node directory.") @@ -859,7 +861,7 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') copy2subjdir(self, self.inputs.rh_white, - 'Surf', 'Rh.White') + 'surf', 'rh.white') copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') copy2subjdir(self, self.inputs.rh_pial, @@ -868,12 +870,13 @@ def run(self, **inputs): 'mri', 'ribbon.mgz') copy2subjdir(self, 
self.inputs.presurf_seg, 'mri', 'aseg.presurf.mgz') + copy2subjdir(self, self.inputs.aseg, + 'mri', 'aseg.mgz') copy2subjdir(self, self.inputs.transform, os.path.join('mri', 'transforms'), 'talairach.xfm') copy2subjdir(self, self.inputs.in_intensity, 'mri') - if isdefined(self.inputs.brainmask_file): - copy2subjdir(self, self.inputs.brainmask_file, 'mri') + copy2subjdir(self, self.inputs.brainmask_file, 'mri') return super(SegStatsReconAll, self).run(**inputs) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py b/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py index 27ea0eb162..8925054f05 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py @@ -19,6 +19,7 @@ def test_Aparc2Aseg_inputs(): environ=dict(nohash=True, usedefault=True, ), + filled=dict(), hypo_wm=dict(argstr='--hypo-as-wm', mandatory=False, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py b/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py index 5be32abc21..851c2acff9 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py @@ -19,6 +19,7 @@ def test_Curvature_inputs(): usedefault=True, ), in_file=dict(argstr='%s', + copyfile=True, mandatory=True, position=-2, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py index f2ca8c2a64..a34894fe99 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py @@ -35,6 +35,7 @@ def test_MakeSurfaces_inputs(): in_orig=dict(argstr='-orig %s', mandatory=True, ), + in_white=dict(), in_wm=dict(mandatory=True, ), longitudinal=dict(argstr='-long', @@ -66,6 +67,8 @@ def test_MakeSurfaces_inputs(): subjects_dir=dict(), terminal_output=dict(nohash=True, ), + white=dict(argstr='-white %s', 
+ ), white_only=dict(argstr='-whiteonly', mandatory=False, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py index e15967ffec..4c0f8dcde6 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py @@ -10,6 +10,7 @@ def test_SegStatsReconAll_inputs(): ), args=dict(argstr='%s', ), + aseg=dict(), avgwf_file=dict(argstr='--avgwfvol %s', ), avgwf_txt_file=dict(argstr='--avgwf %s', @@ -85,8 +86,7 @@ def test_SegStatsReconAll_inputs(): ), partial_volume_file=dict(argstr='--pv %s', ), - presurf_seg=dict(mandatory=True, - ), + presurf_seg=dict(), rh_orig_nofix=dict(mandatory=True, ), rh_pial=dict(mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py index 81786550f5..c87f8716b5 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py @@ -6,6 +6,8 @@ def test_VolumeMask_inputs(): input_map = dict(args=dict(argstr='%s', ), + aseg=dict(xor=['in_aseg'], + ), copy_inputs=dict(mandatory=False, ), environ=dict(nohash=True, @@ -16,6 +18,7 @@ def test_VolumeMask_inputs(): ), in_aseg=dict(argstr='--aseg_name %s', mandatory=False, + xor=['aseg'], ), left_ribbonlabel=dict(argstr='--label_left_ribbon %d', mandatory=True, diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 6edb1399e9..6f03a11f6e 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -35,21 +35,30 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): + """Method to copy an input to the subjects directory""" + # check that the input is defined + if not isdefined(in_file): + return in_file + # check that subjects_dir is defined if 
isdefined(cls.inputs.subjects_dir): subjects_dir = cls.inputs.subjects_dir else: - subjects_dir = os.getcwd() + subjects_dir = os.getcwd() #if not use cwd + # check for subject_id if not subject_id: if isdefined(cls.inputs.subject_id): subject_id = cls.inputs.subject_id else: - subject_id = 'subject_id' + subject_id = 'subject_id' #default + # check for basename if basename == None: basename = os.path.basename(in_file) + # check which folder to put the file in if folder != None: out_dir = os.path.join(subjects_dir, subject_id, folder) else: out_dir = os.path.join(subjects_dir, subject_id) + # make the output folder if it does not exist if not os.path.isdir(out_dir): os.makedirs(out_dir) out_file = os.path.join(out_dir, basename) @@ -58,7 +67,7 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): return out_file def createoutputdirs(outputs): - """create an output directories. If not created, some freesurfer interfaces fail""" + """create all output directories. If not created, some freesurfer interfaces fail""" for output in outputs.itervalues(): dirname = os.path.dirname(output) if not os.path.isdir(dirname): @@ -1867,6 +1876,7 @@ class MakeSurfacesInputSpec(FSTraitedSpec): in_filled = File(exists=True, mandatory=True, desc="Implicit input file filled.mgz") # optional + in_white = File(exists=True, desc="Implicit input that is sometimes used") in_label = File(exists=True, mandatory=False, xor=['noaparc'], desc="Implicit input label/.aparc.annot") orig_white = File(argstr="-orig_white %s", exists=True, mandatory=False, @@ -1893,6 +1903,8 @@ class MakeSurfacesInputSpec(FSTraitedSpec): argstr="-max %.1f", desc="No documentation (used for longitudinal processing)") longitudinal = traits.Bool( argstr="-long", desc="No documentation (used for longitudinal processing)") + white = traits.String(argstr="-white %s", + desc="White surface name") copy_inputs = traits.Bool(mandatory=False, desc="If running as a node, set this to True." 
+ "This will copy the input files to the node " + @@ -1947,15 +1959,15 @@ def run(self, **inputs): folder='mri', basename='wm.mgz') copy2subjdir(self, self.inputs.in_filled, folder='mri', basename='filled.mgz') + copy2subjdir(self, self.inputs.in_white, + 'surf', '{0}.white'.format(self.inputs.hemisphere)) for originalfile in [self.inputs.in_aseg, self.inputs.in_T1]: - if isdefined(originalfile): - copy2subjdir(self, originalfile, folder='mri') + copy2subjdir(self, originalfile, folder='mri') for originalfile in [self.inputs.orig_white, self.inputs.orig_pial, self.inputs.in_orig]: - if isdefined(originalfile): - copy2subjdir(self, originalfile, folder='surf') + copy2subjdir(self, originalfile, folder='surf') if isdefined(self.inputs.in_label): copy2subjdir(self, self.inputs.in_label, 'label', '{0}.aparc.annot'.format(self.inputs.hemisphere)) @@ -1972,9 +1984,11 @@ def _format_arg(self, name, spec, value): basename = os.path.basename(value) # whent the -mgz flag is specified, it assumes the mgz extension if self.inputs.mgz: - prefix = basename.rstrip('.mgz') + prefix = os.path.splitext(basename)[0] else: prefix = basename + if prefix == 'aseg': + return # aseg is already the default return spec.argstr % prefix elif name in ['orig_white', 'orig_pial']: # these inputs do take full file paths or even basenames @@ -2011,8 +2025,8 @@ def _list_outputs(self): dest_dir, str(self.inputs.hemisphere) + '.area') # Something determines when a pial surface and thickness file is generated # but documentation doesn't say what. 
- # The orig_pial flag is just a guess - if isdefined(self.inputs.orig_pial): + # The orig_pial input is just a guess + if isdefined(self.inputs.orig_pial) or self.inputs.white == 'NOWRITE': outputs["out_curv"] = outputs["out_curv"] + ".pial" outputs["out_area"] = outputs["out_area"] + ".pial" outputs["out_pial"] = os.path.join( @@ -2029,7 +2043,7 @@ def _list_outputs(self): class CurvatureInputSpec(FSTraitedSpec): in_file = File(argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input file for Curvature") + copyfile=True, desc="Input file for Curvature") # optional threshold = traits.Float( argstr="-thresh %.3f", mandatory=False, desc="Undocumented input threshold") @@ -2073,7 +2087,6 @@ def _format_arg(self, name, spec, value): if self.inputs.copy_input: if name == 'in_file': basename = os.path.basename(value) - shutil.copy(value, basename) return spec.argstr % basename return super(Curvature, self)._format_arg(name, spec, value) @@ -2301,11 +2314,16 @@ class VolumeMaskInputSpec(FSTraitedSpec): desc="Implicit input left white matter surface") rh_white = File(mandatory=True, exists=True, desc="Implicit input right white matter surface") + aseg = File(exists=True, + xor=['in_aseg'], + desc="Implicit aseg.mgz segmentation. 
" + + "Specify a different aseg by using the 'in_aseg' input.") subject_id = traits.String('subject_id', usedefault=True, position=-1, argstr="%s", mandatory=True, desc="Subject being processed") # optional - in_aseg = File(argstr="--aseg_name %s", mandatory=False, exists=True, + in_aseg = File(argstr="--aseg_name %s", mandatory=False, + exists=True, xor=['aseg'], desc="Input aseg file for VolumeMask") save_ribbon = traits.Bool(argstr="--save_ribbon", mandatory=False, desc="option to save just the ribbon for the " + @@ -2364,6 +2382,8 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') copy2subjdir(self, self.inputs.in_aseg, 'mri') + copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.mgz') + return super(VolumeMask, self).run(**inputs) def _format_arg(self, name, spec, value): @@ -2726,6 +2746,8 @@ class Aparc2AsegInputSpec(FSTraitedSpec): rh_annotation = File(mandatory=True, exists=True, desc="Input file must be /label/rh.aparc.annot") # optional + filled = File(exists=True, + desc="Implicit input filled file. 
Only required with FS v5.3.") aseg = File(argstr="--aseg %s", mandatory=False, exists=True, desc="Input aseg file") volmask = traits.Bool(argstr="--volmask", mandatory=False, @@ -2808,6 +2830,7 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.rh_ribbon, 'mri', 'rh.ribbon.mgz') copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz') copy2subjdir(self, self.inputs.aseg, 'mri') + copy2subjdir(self, self.inputs.filled, 'mri', 'filled.mgz') copy2subjdir(self, self.inputs.lh_annotation, 'label') copy2subjdir(self, self.inputs.rh_annotation, 'label') @@ -2816,7 +2839,8 @@ def run(self, **inputs): def _format_arg(self, name, spec, value): if name == 'aseg': # aseg does not take a full filename - return spec.argstr % os.path.basename(value).replace('.mgz', '') + basename = os.path.basename(value).replace('.mgz', '') + return spec.argstr % basename elif name == 'out_file': return spec.argstr % os.path.abspath(value) diff --git a/nipype/workflows/smri/freesurfer/autorecon1.py b/nipype/workflows/smri/freesurfer/autorecon1.py index e29a949166..3b94315218 100644 --- a/nipype/workflows/smri/freesurfer/autorecon1.py +++ b/nipype/workflows/smri/freesurfer/autorecon1.py @@ -32,9 +32,9 @@ def checkT1s(T1_files, cw256=False): resample_type = 'cubic' if len(T1_files) > 1 else 'interpolate' return T1_files, cw256, resample_type, origvol_names - -def create_AutoRecon1(name="AutoRecon1", longitudinal=False, field_strength='1.5T', - custom_atlas=None, plugin_args=None): +def create_AutoRecon1(name="AutoRecon1", longitudinal=False, distance=None, + custom_atlas=None, plugin_args=None, shrink=None, stop=None, + fsvernum=5.3): """Creates the AutoRecon1 workflow in nipype. 
Inputs:: @@ -235,22 +235,11 @@ def createTemplate(in_files, out_file): bias_correction = pe.Node(MNIBiasCorrection(), name="Bias_correction") bias_correction.inputs.iterations = 1 bias_correction.inputs.protocol_iterations = 1000 - if field_strength == '3T': - # 3T params from Zheng, Chee, Zagorodnov 2009 NeuroImage paper - # "Improvement of brain segmentation accuracy by optimizing - # non-uniformity correction using N3" - # namely specifying iterations, proto-iters and distance: - bias_correction.inputs.distance = 50 - else: - # 1.5T default - bias_correction.inputs.distance = 200 - # per c.larsen, decrease convergence threshold (default is 0.001) - bias_correction.inputs.stop = 0.0001 - # per c.larsen, decrease shrink parameter: finer sampling (default is 4) - bias_correction.inputs.shrink = 2 - # add the mask, as per c.larsen, bias-field correction is known to work - # much better when the brain area is properly masked, in this case by - # brainmask.mgz. + bias_correction.inputs.distance = distance + if stop: + bias_correction.inputs.stop = stop + if shrink: + bias_correction.inputs.shrink = shrink bias_correction.inputs.no_rescale = True bias_correction.inputs.out_file = 'orig_nu.mgz' @@ -344,6 +333,27 @@ def awkfile(in_file, log_file): tal_qc = pe.Node(TalairachQC(), name="Detect_Aligment_Failures") ar1_wf.connect([(awk_logfile, tal_qc, [('log_file', 'log_file')])]) + + if fsvernum < 6: + # intensity correction is performed before normalization + intensity_correction = pe.Node( + MNIBiasCorrection(), name="Intensity_Correction") + intensity_correction.inputs.out_file = 'nu.mgz' + intensity_correction.inputs.iterations = 2 + ar1_wf.connect([(add_xform_to_orig, intensity_correction, [('out_file', 'in_file')]), + (copy_transform, intensity_correction, [('out_file', 'transform')])]) + + + add_to_header_nu = pe.Node(AddXFormToHeader(), name="Add_XForm_to_NU") + add_to_header_nu.inputs.copy_name = True + add_to_header_nu.inputs.out_file = 'nu.mgz' + 
ar1_wf.connect([(intensity_correction, add_to_header_nu, [('out_file', 'in_file'), + ]), + (copy_transform, add_to_header_nu, + [('out_file', 'transform')]) + ]) + + # Intensity Normalization # Performs intensity normalization of the orig volume and places the result in mri/T1.mgz. # Attempts to correct for fluctuations in intensity that would otherwise make intensity-based @@ -353,10 +363,13 @@ def awkfile(in_file, log_file): mri_normalize = pe.Node(Normalize(), name="Normalize_T1") mri_normalize.inputs.gradient = 1 mri_normalize.inputs.out_file = 'T1.mgz' - ar1_wf.connect([(add_xform_to_orig_nu, mri_normalize, [('out_file', 'in_file')]), - (copy_transform, mri_normalize, - [('out_file', 'transform')]), - ]) + + if fsvernum < 6: + ar1_wf.connect([(add_to_header_nu, mri_normalize, [('out_file', 'in_file')])]) + else: + ar1_wf.connect([(add_xform_to_orig_nu, mri_normalize, [('out_file', 'in_file')])]) + + ar1_wf.connect([(copy_transform, mri_normalize, [('out_file', 'transform')])]) # Skull Strip """ @@ -370,8 +383,12 @@ def awkfile(in_file, log_file): if plugin_args: mri_em_register.plugin_args = plugin_args - ar1_wf.connect([(add_xform_to_orig_nu, mri_em_register, [('out_file', 'in_file')]), - (inputspec, mri_em_register, [('num_threads', 'num_threads'), + if fsvernum < 6: + ar1_wf.connect(add_to_header_nu, 'out_file', mri_em_register, 'in_file') + else: + ar1_wf.connect(add_xform_to_orig_nu, 'out_file', mri_em_register, 'in_file') + + ar1_wf.connect([(inputspec, mri_em_register, [('num_threads', 'num_threads'), ('reg_template_withskull', 'template')])]) brainmask = pe.Node(WatershedSkullStrip(), @@ -426,8 +443,14 @@ def awkfile(in_file, log_file): 'brainmask_auto', 'brainmask', 'braintemplate'] - outputspec = pe.Node(IdentityInterface(fields=outputs), - name="outputspec") + + if fsvernum < 6: + outputspec = pe.Node(IdentityInterface(fields=outputs + ['nu']), + name="outputspec") + ar1_wf.connect([(add_to_header_nu, outputspec, [('out_file', 'nu')])]) + else: + 
outputspec = pe.Node(IdentityInterface(fields=outputs), + name="outputspec") ar1_wf.connect([(T1_image_preparation, outputspec, [('out_file', 'origvols')]), (T2_convert, outputspec, [('out_file', 't2_raw')]), diff --git a/nipype/workflows/smri/freesurfer/autorecon2.py b/nipype/workflows/smri/freesurfer/autorecon2.py index f09e991439..41f15a017b 100644 --- a/nipype/workflows/smri/freesurfer/autorecon2.py +++ b/nipype/workflows/smri/freesurfer/autorecon2.py @@ -9,12 +9,14 @@ def copy_ltas(in_file, subjects_dir, subject_id, long_template): return out_file def create_AutoRecon2(name="AutoRecon2", longitudinal=False, - field_strength="1.5T", plugin_args=None): + plugin_args=None, fsvernum=5.3, + stop=None, shrink=None, distance=None): # AutoRecon2 # Workflow ar2_wf = pe.Workflow(name=name) inputspec = pe.Node(IdentityInterface(fields=['orig', + 'nu', # version < 6 'brainmask', 'transform', 'subject_id', @@ -45,40 +47,39 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, inputspec.inputs.timepoints = config['timepoints'] - # NU Intensity Correction - """ - Non-parametric Non-uniform intensity Normalization (N3), corrects for - intensity non-uniformity in MR data, making relatively few assumptions about - the data. This runs the MINC tool 'nu_correct'. 
- """ - intensity_correction = pe.Node( - MNIBiasCorrection(), name="Intensity_Correction") - intensity_correction.inputs.iterations = 1 - intensity_correction.inputs.protocol_iterations = 1000 - intensity_correction.inputs.stop = 0.0001 - intensity_correction.inputs.shrink = 2 - if field_strength == '3T': - intensity_correction.inputs.distance = 50 - else: - # default for 1.5T scans - intensity_correction.inputs.distance = 200 - - intensity_correction.inputs.out_file = 'nu.mgz' - ar2_wf.connect([(inputspec, intensity_correction, [('orig', 'in_file'), - ('brainmask', 'mask'), - ('transform', 'transform') - ]) - ]) - - add_to_header_nu = pe.Node(AddXFormToHeader(), name="Add_XForm_to_NU") - add_to_header_nu.inputs.copy_name = True - add_to_header_nu.inputs.out_file = 'nu.mgz' - ar2_wf.connect([(intensity_correction, add_to_header_nu, [('out_file', 'in_file'), + if fsvernum >= 6: + # NU Intensity Correction + """ + Non-parametric Non-uniform intensity Normalization (N3), corrects for + intensity non-uniformity in MR data, making relatively few assumptions about + the data. This runs the MINC tool 'nu_correct'. 
+ """ + intensity_correction = pe.Node( + MNIBiasCorrection(), name="Intensity_Correction") + intensity_correction.inputs.out_file = 'nu.mgz' + ar2_wf.connect([(inputspec, intensity_correction, [('orig', 'in_file'), + ('brainmask', 'mask'), + ('transform', 'transform')])]) + + # intensity correction parameters are more specific in 6+ + intensity_correction.inputs.iterations = 1 + intensity_correction.inputs.protocol_iterations = 1000 + if stop: + intensity_correction.inputs.stop = stop + if shrink: + intensity_correction.inputs.shrink = shrink + intensity_correction.inputs.distance = distance + + add_to_header_nu = pe.Node(AddXFormToHeader(), name="Add_XForm_to_NU") + add_to_header_nu.inputs.copy_name = True + add_to_header_nu.inputs.out_file = 'nu.mgz' + ar2_wf.connect([(intensity_correction, add_to_header_nu, [('out_file', 'in_file'), ]), - (inputspec, add_to_header_nu, - [('transform', 'transform')]) + (inputspec, add_to_header_nu, + [('transform', 'transform')]) ]) + # EM Registration """ Computes the transform to align the mri/nu.mgz volume to the default GCA @@ -101,8 +102,12 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, align_transform.plugin_args = plugin_args ar2_wf.connect([(inputspec, align_transform, [('brainmask', 'mask'), ('reg_template', 'template'), - ('num_threads', 'num_threads')]), - (add_to_header_nu, align_transform, [('out_file', 'in_file')])]) + ('num_threads', 'num_threads')])]) + if fsvernum >= 6: + ar2_wf.connect([(add_to_header_nu, align_transform, [('out_file', 'in_file')])]) + else: + ar2_wf.connect([(inputspec, align_transform, [('nu', 'in_file')])]) + # CA Normalize """ @@ -126,8 +131,12 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, ar2_wf.connect([(align_transform, ca_normalize, [('out_file', 'transform')]), (inputspec, ca_normalize, [('brainmask', 'mask'), - ('reg_template', 'atlas')]), - (add_to_header_nu, ca_normalize, [('out_file', 'in_file')])]) + ('reg_template', 'atlas')])]) + if fsvernum >= 
6: + ar2_wf.connect([(add_to_header_nu, ca_normalize, [('out_file', 'in_file')])]) + else: + ar2_wf.connect([(inputspec, ca_normalize, [('nu', 'in_file')])]) + # CA Register # Computes a nonlinear transform to align with GCA atlas. @@ -146,7 +155,7 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, else: ca_register.inputs.levels = 2 ca_register.inputs.A = 1 - ar2_wf.connect([(ar1_inputs, ca_register, [('template_talairach_m3z', 'l_files')])]) + ar2_wf.connect([(inputspec, ca_register, [('template_talairach_m3z', 'l_files')])]) # Remove Neck """ @@ -157,8 +166,11 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, remove_neck.inputs.radius = 25 remove_neck.inputs.out_file = 'nu_noneck.mgz' ar2_wf.connect([(ca_register, remove_neck, [('out_file', 'transform')]), - (add_to_header_nu, remove_neck, [('out_file', 'in_file')]), (inputspec, remove_neck, [('reg_template', 'template')])]) + if fsvernum >= 6: + ar2_wf.connect([(add_to_header_nu, remove_neck, [('out_file', 'in_file')])]) + else: + ar2_wf.connect([(inputspec, remove_neck, [('nu', 'in_file')])]) # SkullLTA (EM Registration, with Skull) # Computes transform to align volume mri/nu_noneck.mgz with GCA volume @@ -205,8 +217,9 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, fuse_segmentations.inputs.out_file = 'aseg.fused.mgz' ca_label = pe.Node(CALabel(), name='CA_Label') - ca_label.inputs.relabel_unlikely = (9, .3) - ca_label.inputs.prior = 0.5 + if fsvernum >= 6: + ca_label.inputs.relabel_unlikely = (9, .3) + ca_label.inputs.prior = 0.5 ca_label.inputs.align = True ca_label.inputs.out_file = 'aseg.auto_noCCseg.mgz' if plugin_args: @@ -678,8 +691,14 @@ def create_AutoRecon2(name="AutoRecon2", longitudinal=False, outputspec = pe.Node(IdentityInterface(fields=outputs), name="outputspec") - ar2_wf.connect([(add_to_header_nu, outputspec, [('out_file', 'nu')]), - (align_transform, outputspec, [('out_file', 'tal_lta')]), + if fsvernum >= 6: + ar2_wf.connect([(add_to_header_nu, 
outputspec, [('out_file', 'nu')])]) + else: + # add to outputspec to perserve datasinking + ar2_wf.connect([(inputspec, outputspec, [('nu', 'nu')])]) + + + ar2_wf.connect([(align_transform, outputspec, [('out_file', 'tal_lta')]), (ca_normalize, outputspec, [('out_file', 'norm')]), (ca_normalize, outputspec, [('control_points', 'ctrl_pts')]), (ca_register, outputspec, [('out_file', 'tal_m3z')]), diff --git a/nipype/workflows/smri/freesurfer/autorecon3.py b/nipype/workflows/smri/freesurfer/autorecon3.py index 656f81188e..daace1c8c7 100644 --- a/nipype/workflows/smri/freesurfer/autorecon3.py +++ b/nipype/workflows/smri/freesurfer/autorecon3.py @@ -5,7 +5,7 @@ from nipype.interfaces.io import DataGrabber def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, - th3=True): + th3=True, exvivo=True, entorhinal=True, fsvernum=5.3): # AutoRecon3 # Workflow @@ -162,16 +162,22 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, # Pial Surface ar3_pial = pe.Node(MakeSurfaces(), name="Make_Pial_Surface") - ar3_pial.inputs.no_white = True ar3_pial.inputs.mgz = True ar3_pial.inputs.hemisphere = hemisphere ar3_pial.inputs.copy_inputs = True + + if fsvernum < 6: + ar3_pial.inputs.white = 'NOWRITE' + hemi_wf.connect(hemi_inputspec1, 'white', ar3_pial, 'in_white') + else: + ar3_pial.inputs.no_white = True + hemi_wf.connect([(hemi_inputspec1, ar3_pial, [('white', 'orig_pial'), + ('white', 'orig_white')])]) + hemi_wf.connect([(hemi_inputspec1, ar3_pial, [('wm', 'in_wm'), ('orig', 'in_orig'), ('filled', 'in_filled'), - ('white', 'orig_pial'), - ('white', 'orig_white'), ('brain_finalsurfs', 'in_T1'), ('aseg_presurf', 'in_aseg')]), (ar3_parcellation, ar3_pial, [('out_file', 'in_label')]) @@ -271,25 +277,24 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, volume_mask.inputs.copy_inputs = True - ar3_wf.connect([(inputspec, volume_mask, [('aseg_presurf', 'in_aseg'), - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - 
]), + ar3_wf.connect([(inputspec, volume_mask, [('lh_white', 'lh_white'), + ('rh_white', 'rh_white')]), (ar3_lh_wf1, volume_mask, [('outputspec.pial', 'lh_pial')]), (ar3_rh_wf1, volume_mask, [('outputspec.pial', 'rh_pial')]), ]) + if fsvernum >= 6: + ar3_wf.connect([(inputspec, volume_mask, [('aseg_presurf', 'in_aseg')])]) + else: + ar3_wf.connect([(inputspec, volume_mask, [('aseg_presurf', 'aseg')])]) + ar3_lh_wf2 = pe.Workflow(name="AutoRecon3_Left_2") ar3_rh_wf2 = pe.Workflow(name="AutoRecon3_Right_2") for hemisphere, hemiwf2 in [('lh', ar3_lh_wf2), ('rh', ar3_rh_wf2)]: if hemisphere == 'lh': - opp_hemi = 'rh' - opp_wf = ar3_rh_wf2 hemiwf1 = ar3_lh_wf1 else: - opp_hemi = 'lh' - opp_wf = ar3_lh_wf2 hemiwf1 = ar3_rh_wf1 hemi_inputs2 = ['wm', @@ -538,14 +543,6 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, #End hemisphere2 workflow - # Relabel Hypointensities - relabel_hypos = pe.Node(RelabelHypointensities(), name="Relabel_Hypointensities") - relabel_hypos.inputs.out_file = 'aseg.presurf.hypos.mgz' - ar3_wf.connect([(inputspec, relabel_hypos, [('aseg_presurf', 'aseg'), - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ])]) - # APARC to ASEG # Adds information from the ribbon into the aseg.mgz (volume parcellation). 
aparc_2_aseg = pe.Node(Aparc2Aseg(), name="Aparc2Aseg") @@ -566,9 +563,17 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, (volume_mask, aparc_2_aseg, [('rh_ribbon', 'rh_ribbon'), ('lh_ribbon', 'lh_ribbon'), ('out_ribbon', 'ribbon'), - ]), - (relabel_hypos, aparc_2_aseg, [('out_file', 'aseg')]) - ]) + ])]) + if fsvernum < 6: + ar3_wf.connect([(inputspec, aparc_2_aseg, [('aseg_presurf', 'aseg')])]) + else: + # Relabel Hypointensities + relabel_hypos = pe.Node(RelabelHypointensities(), name="Relabel_Hypointensities") + relabel_hypos.inputs.out_file = 'aseg.presurf.hypos.mgz' + ar3_wf.connect([(inputspec, relabel_hypos, [('aseg_presurf', 'aseg'), + ('lh_white', 'lh_white'), + ('rh_white', 'rh_white')])]) + ar3_wf.connect([(relabel_hypos, aparc_2_aseg, [('out_file', 'aseg')])]) aparc_2_aseg_2009 = pe.Node(Aparc2Aseg(), name="Aparc2Aseg_2009") aparc_2_aseg_2009.inputs.volmask = True @@ -589,14 +594,31 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, (volume_mask, aparc_2_aseg_2009, [('rh_ribbon', 'rh_ribbon'), ('lh_ribbon', 'lh_ribbon'), - ('out_ribbon', 'ribbon'), - ]), - (relabel_hypos, aparc_2_aseg_2009, [('out_file', 'aseg')]) - ]) + ('out_ribbon', 'ribbon')])]) + + if fsvernum >= 6: + apas_2_aseg = pe.Node(Apas2Aseg(), name="Apas_2_Aseg") + ar3_wf.connect([(aparc_2_aseg, apas_2_aseg, [('out_file', 'in_file')]), + (relabel_hypos, aparc_2_aseg_2009, [('out_file', 'aseg')])]) + else: + # aseg.mgz gets edited in place, so we'll copy and pass it to the + # outputspec once aparc_2_aseg has completed + def out_aseg(in_aparcaseg, in_aseg, out_file): + import shutil + import os + out_file = os.path.abspath(out_file) + shutil.copy(in_aseg, out_file) + return out_file + apas_2_aseg = pe.Node(Function(['in_aparcaseg', 'in_aseg', 'out_file'], + ['out_file'], + out_aseg), + name="Aseg") + ar3_wf.connect([(aparc_2_aseg, apas_2_aseg, [('out_file', 'in_aparcaseg')]), + (inputspec, apas_2_aseg, [('aseg_presurf', 'in_aseg')]), + 
(inputspec, aparc_2_aseg_2009, [('aseg_presurf', 'aseg')])]) - apas_2_aseg = pe.Node(Apas2Aseg(), name="Apas_2_Aseg") apas_2_aseg.inputs.out_file = "aseg.mgz" - ar3_wf.connect([(aparc_2_aseg, apas_2_aseg, [('out_file', 'in_file')])]) + # Segmentation Stats """ @@ -623,7 +645,6 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, ar3_wf.connect([(apas_2_aseg, segstats, [('out_file', 'segmentation_file')]), (inputspec, segstats, [('lh_white', 'lh_white'), ('rh_white', 'rh_white'), - ('aseg_presurf', 'presurf_seg'), ('transform', 'transform'), ('norm', 'in_intensity'), ('norm', 'partial_volume_file'), @@ -639,6 +660,12 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, ]), ]) + if fsvernum >= 6: + ar3_wf.connect(inputspec, 'aseg_presurf', segstats, 'presurf_seg') + else: + ar3_wf.connect(inputspec, 'aseg_presurf', segstats, 'aseg') + + # White Matter Parcellation # Adds WM Parcellation info into the aseg and computes stat. @@ -667,8 +694,10 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, ('out_ribbon', 'ribbon'), ]), (apas_2_aseg, wm_parcellation, [('out_file', 'aseg')]), - (aparc_2_aseg, wm_parcellation, [('out_file', 'ctxseg')]) - ]) + (aparc_2_aseg, wm_parcellation, [('out_file', 'ctxseg')])]) + + if fsvernum < 6: + ar3_wf.connect([(inputspec, wm_parcellation, [('filled', 'filled')])]) # White Matter Segmentation Stats @@ -683,7 +712,6 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, ar3_wf.connect([(wm_parcellation, wm_segstats, [('out_file', 'segmentation_file')]), (inputspec, wm_segstats, [('lh_white', 'lh_white'), ('rh_white', 'rh_white'), - ('aseg_presurf', 'presurf_seg'), ('transform', 'transform'), ('norm', 'in_intensity'), ('norm', 'partial_volume_file'), @@ -701,8 +729,15 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, ]), ]) + if fsvernum >= 6: + ar3_wf.connect(inputspec, 'aseg_presurf', wm_segstats, 'presurf_seg') + else: + 
ar3_wf.connect(inputspec, 'aseg_presurf', wm_segstats, 'aseg') + + # add brodman area maps to the workflow - ba_WF, ba_outputs = create_ba_maps_wf(th3=th3) + ba_WF, ba_outputs = create_ba_maps_wf(th3=th3, exvivo=exvivo, + entorhinal=entorhinal) ar3_wf.connect([(ar3_lh_wf1, ba_WF, [('outputspec.sphere_reg', 'inputspec.lh_sphere_reg'), ('outputspec.thickness_pial', 'inputspec.lh_thickness'), @@ -891,11 +926,12 @@ def create_AutoRecon3(name="AutoRecon3", qcache=False, plugin_args=None, (segstats, outputspec, [('summary_file', 'aseg_stats')]), (aparc_2_aseg_2009, outputspec, [('out_file', 'aparc_a2009s_aseg')]), (aparc_2_aseg, outputspec, [('out_file', 'aparc_aseg')]), - (relabel_hypos, outputspec, [('out_file', 'aseg_presurf_hypos')]), (volume_mask, outputspec, [('out_ribbon', 'ribbon'), ('lh_ribbon', 'lh_ribbon'), - ('rh_ribbon', 'rh_ribbon')]), - ]) + ('rh_ribbon', 'rh_ribbon')])]) + if fsvernum >= 6: + ar3_wf.connect([(relabel_hypos, outputspec, [('out_file', 'aseg_presurf_hypos')])]) + for i, outputs in enumerate([hemi_outputs1, hemi_outputs2]): if i == 0: diff --git a/nipype/workflows/smri/freesurfer/ba_maps.py b/nipype/workflows/smri/freesurfer/ba_maps.py index 337e1f40cb..7fa266250c 100644 --- a/nipype/workflows/smri/freesurfer/ba_maps.py +++ b/nipype/workflows/smri/freesurfer/ba_maps.py @@ -6,7 +6,8 @@ from nipype.interfaces.io import DataGrabber from nipype.interfaces.utility import Merge -def create_ba_maps_wf(name="Brodmann_Area_Maps", th3=True): +def create_ba_maps_wf(name="Brodmann_Area_Maps", th3=True, exvivo=True, + entorhinal=True): # Brodmann Area Maps (BA Maps) and Hinds V1 Atlas inputs = ['lh_sphere_reg', 'rh_sphere_reg', @@ -55,40 +56,54 @@ def create_ba_maps_wf(name="Brodmann_Area_Maps", th3=True): name="outputspec") labels = ["BA1", "BA2", "BA3a", "BA3b", "BA4a", "BA4p", "BA6", - "BA44", "BA45", "V1", "V2", "MT", "entorhinal", "perirhinal"] + "BA44", "BA45", "V1", "V2", "MT", "perirhinal"] + if entorhinal: + labels.insert(-1, 'entorhinal') for 
hemisphere in ['lh', 'rh']: for threshold in [True, False]: field_template = dict(sphere_reg='surf/{0}.sphere.reg'.format(hemisphere), white='surf/{0}.white'.format(hemisphere)) out_files = list() + source_fields = list() if threshold: for label in labels: - out_file = '{0}.{1}_exvivo.thresh.label'.format(hemisphere, label) + if label == 'perirhinal' and not entorhinal: + # versions < 6.0 do not use thresh.perirhinal + continue + if exvivo: + out_file = '{0}.{1}_exvivo.thresh.label'.format(hemisphere, label) + else: + out_file = '{0}.{1}.thresh.label'.format(hemisphere, label) out_files.append(out_file) field_template[label] = 'label/' + out_file - node_name = 'BA_Maps_' + hemisphere + '_Tresh' + source_fields.append(label) + node_name = 'BA_Maps_' + hemisphere + '_Thresh' else: for label in labels: - out_file = '{0}.{1}_exvivo.label'.format(hemisphere, label) + if exvivo: + out_file = '{0}.{1}_exvivo.label'.format(hemisphere, label) + else: + out_file = '{0}.{1}.label'.format(hemisphere, label) + out_files.append(out_file) field_template[label] = 'label/' + out_file + source_fields.append(label) node_name = 'BA_Maps_' + hemisphere - source_fields = labels + ['sphere_reg', 'white'] - source_subject = pe.Node(DataGrabber(outfields=source_fields), + source_subject = pe.Node(DataGrabber(outfields=source_fields + ['sphere_reg', 'white']), name=node_name + "_srcsubject") source_subject.inputs.template = '*' source_subject.inputs.sort_filelist = False source_subject.inputs.field_template = field_template ba_WF.connect([(inputspec, source_subject, [('src_subject_dir', 'base_directory')])]) - merge_labels = pe.Node(Merge(len(labels)), + merge_labels = pe.Node(Merge(len(out_files)), name=node_name + "_Merge") - for i,label in enumerate(labels): + for i,label in enumerate(source_fields): ba_WF.connect([(source_subject, merge_labels, [(label, 'in{0}'.format(i+1))])]) - node = pe.MapNode(Label2Label(), name=node_name, + node = pe.MapNode(Label2Label(), name=node_name + 
'_Label2Label', iterfield=['source_label', 'out_file']) node.inputs.hemisphere = hemisphere node.inputs.out_file = out_files diff --git a/nipype/workflows/smri/freesurfer/recon.py b/nipype/workflows/smri/freesurfer/recon.py index 0f1aad8913..4010923bc5 100644 --- a/nipype/workflows/smri/freesurfer/recon.py +++ b/nipype/workflows/smri/freesurfer/recon.py @@ -4,7 +4,7 @@ from .autorecon1 import create_AutoRecon1 from .autorecon2 import create_AutoRecon2 from .autorecon3 import create_AutoRecon3 -from ....interfaces.freesurfer import AddXFormToHeader +from ....interfaces.freesurfer import AddXFormToHeader, Info from ....interfaces.io import DataSink from .utils import getdefaultconfig @@ -134,6 +134,39 @@ def create_reconall_workflow(name="ReconAll", plugin_args=None): run_without_submitting=True, name='inputspec') + # check freesurfer version and set parameters + fs_version_full = Info.version() + if 'v6.0' in fs_version_full or 'dev' in fs_version_full: + # assuming that dev is 6.0 + fsvernum = 6.0 + fs_version = 'v6.0' + th3 = True + shrink = 2 + distance = 200 # 3T should be 50 + stop = 0.0001 + exvivo = True + entorhinal = True + rb_date = "2014-08-21" + else: + # 5.3 is default + fsvernum = 5.3 + if 'v5.3' in fs_version_full: + fs_version = 'v5.3' + else: + fs_version = fs_version_full.split('-')[-1] + print("Warning: Workflow may not work properly if FREESURFER_HOME " + + "environmental variable is not set or if you are using an older " + + "version of FreeSurfer") + th3 = False + shrink = None + distance = 50 + stop = None + exvivo = False + entorhinal = False + rb_date = "2008-03-26" + + print("FreeSurfer Version: {0}".format(fs_version)) + def setconfig(reg_template=None, reg_template_withskull=None, lh_atlas=None, @@ -149,7 +182,8 @@ def setconfig(reg_template=None, color_table=None, lookup_table=None, wm_lookup_table=None, - awk_file=None): + awk_file=None, + rb_date=None): """Set optional configurations to the default""" from 
nipype.workflows.smri.freesurfer.utils import getdefaultconfig def checkarg(arg, default): @@ -158,7 +192,7 @@ def checkarg(arg, default): return arg else: return default - defaultconfig = getdefaultconfig(exitonfail=True) + defaultconfig = getdefaultconfig(exitonfail=True, rb_date=rb_date) # set the default template and classifier files reg_template = checkarg(reg_template, defaultconfig['registration_template']) reg_template_withskull = checkarg(reg_template_withskull, @@ -200,16 +234,20 @@ def checkarg(arg, default): 'wm_lookup_table', 'awk_file'] - config_node = pe.Node(niu.Function(params, + config_node = pe.Node(niu.Function(params + ['rb_date'], params, setconfig), name="config") + config_node.inputs.rb_date = rb_date + for param in params: reconall.connect(inputspec, param, config_node, param) # create AutoRecon1 - ar1_wf, ar1_outputs = create_AutoRecon1(plugin_args=plugin_args) + ar1_wf, ar1_outputs = create_AutoRecon1(plugin_args=plugin_args, stop=stop, + distance=distance, shrink=shrink, + fsvernum=fsvernum) # connect inputs for AutoRecon1 reconall.connect([(inputspec, ar1_wf, [('T1_files', 'inputspec.T1_files'), ('T2_file', 'inputspec.T2_file'), @@ -219,9 +257,9 @@ def checkarg(arg, default): (config_node, ar1_wf, [('reg_template_withskull', 'inputspec.reg_template_withskull'), ('awk_file', 'inputspec.awk_file')])]) - # create AutoRecon2 - ar2_wf, ar2_outputs = create_AutoRecon2(plugin_args=plugin_args) + ar2_wf, ar2_outputs = create_AutoRecon2(plugin_args=plugin_args, fsvernum=fsvernum, + stop=stop, shrink=shrink, distance=distance) # connect inputs for AutoRecon2 reconall.connect([(inputspec, ar2_wf, [('num_threads', 'inputspec.num_threads')]), (config_node, ar2_wf, [('reg_template_withskull', @@ -230,8 +268,14 @@ def checkarg(arg, default): (ar1_wf, ar2_wf, [('outputspec.brainmask', 'inputspec.brainmask'), ('outputspec.talairach', 'inputspec.transform'), ('outputspec.orig', 'inputspec.orig')])]) + + if fsvernum < 6: + reconall.connect([(ar1_wf, 
ar2_wf, [('outputspec.nu', 'inputspec.nu')])]) + # create AutoRecon3 - ar3_wf, ar3_outputs = create_AutoRecon3(plugin_args=plugin_args, th3=True) + ar3_wf, ar3_outputs = create_AutoRecon3(plugin_args=plugin_args, th3=th3, + exvivo=exvivo, entorhinal=entorhinal, + fsvernum=fsvernum) # connect inputs for AutoRecon3 reconall.connect([(config_node, ar3_wf, [('lh_atlas', 'inputspec.lh_atlas'), ('rh_atlas', 'inputspec.rh_atlas'), diff --git a/nipype/workflows/smri/freesurfer/utils.py b/nipype/workflows/smri/freesurfer/utils.py index 74d45776d2..65e352f5e5 100644 --- a/nipype/workflows/smri/freesurfer/utils.py +++ b/nipype/workflows/smri/freesurfer/utils.py @@ -420,7 +420,7 @@ def mkdir_p(path): else: raise -def getdefaultconfig(exitonfail=False): +def getdefaultconfig(exitonfail=False, rb_date="2014-08-21"): config = { 'custom_atlas' : None, 'cw256' : False, 'field_strength' : '1.5T', @@ -440,9 +440,9 @@ def getdefaultconfig(exitonfail=False): config['awk_file'] = os.path.join(config['fs_home'], 'bin', 'extract_talairach_avi_QA.awk') config['registration_template'] = os.path.join(config['fs_home'], 'average', - 'RB_all_2014-08-21.gca') + 'RB_all_{0}.gca'.format(rb_date)) config['registration_template_withskull'] = os.path.join(config['fs_home'], 'average', - 'RB_all_withskull_2014-08-21.gca') + 'RB_all_withskull_{0}.gca'.format(rb_date)) for hemi in ('lh', 'rh'): config['{0}_atlas'.format(hemi)] = os.path.join( config['fs_home'], 'average',