From 1c11e63dedd3660e227095da45ae680f3636d5c5 Mon Sep 17 00:00:00 2001
From: Satrajit Ghosh
Date: Fri, 9 Sep 2011 12:55:18 -0400
Subject: [PATCH] sty: removed trailing spaces (thanks alex)

---
 Makefile | 5 +-
 build_docs.py | 30 +--
 doc/sphinxext/docscrape.py | 10 +-
 doc/sphinxext/ipython_console_highlighting.py | 2 +-
 doc/sphinxext/numpydoc.py | 2 +-
 examples/camino_dti_tutorial.py | 2 +-
 examples/connectivity_tutorial.py | 2 +-
 examples/dartmouth_workshop_2010.py | 52 ++--
 examples/dtk_dti_tutorial.py | 4 +-
 examples/dtk_odf_tutorial.py | 4 +-
 examples/freesurfer_tutorial.py | 16 +-
 .../frontiers_paper/smoothing_comparison.py | 68 +++---
 .../frontiers_paper/workflow_from_scratch.py | 96 ++++----
 examples/fsl_dti_tutorial.py | 2 +-
 examples/fsl_feeds_tutorial.py | 2 +-
 examples/fsl_resting_compcorr.py | 2 +-
 examples/nipy_tutorial.py | 4 +-
 examples/slicer_tutorial.py | 2 +-
 examples/spm_dartel_tutorial.py | 14 +-
 examples/tbss_tutorial.py | 2 +-
 nipype/__init__.py | 2 +-
 nipype/algorithms/misc.py | 146 +++++------
 nipype/algorithms/modelgen.py | 22 +-
 nipype/algorithms/rapidart.py | 44 ++--
 nipype/algorithms/tests/test_modelgen.py | 2 +-
 nipype/algorithms/tests/test_rapidart.py | 6 +-
 nipype/interfaces/afni/preprocess.py | 2 +-
 .../interfaces/afni/tests/test_preprocess.py | 6 +-
 nipype/interfaces/base.py | 18 +-
 nipype/interfaces/camino/connectivity.py | 2 +-
 nipype/interfaces/camino/convert.py | 2 +-
 nipype/interfaces/camino/dti.py | 20 +-
 nipype/interfaces/camino2trackvis/convert.py | 2 +-
 nipype/interfaces/cmtk/convert.py | 2 +-
 nipype/interfaces/dcm2nii.py | 39 ++-
 .../interfaces/diffusion_toolkit/__init__.py | 2 +-
 nipype/interfaces/diffusion_toolkit/base.py | 6 +-
 nipype/interfaces/diffusion_toolkit/dti.py | 28 +--
 nipype/interfaces/diffusion_toolkit/odf.py | 44 ++--
 .../interfaces/diffusion_toolkit/postproc.py | 10 +-
 nipype/interfaces/freesurfer/base.py | 34 +--
 nipype/interfaces/freesurfer/model.py | 54 ++---
 nipype/interfaces/freesurfer/preprocess.py | 78 +++---
 .../interfaces/freesurfer/tests/test_model.py | 2 +-
 .../freesurfer/tests/test_preprocess.py | 18 +-
 .../interfaces/freesurfer/tests/test_utils.py | 10 +-
 nipype/interfaces/fsl/__init__.py | 6 +-
 nipype/interfaces/fsl/base.py | 18 +-
 nipype/interfaces/fsl/dti.py | 228 +++++++++---------
 nipype/interfaces/fsl/maths.py | 14 +-
 nipype/interfaces/fsl/model.py | 64 ++---
 nipype/interfaces/fsl/preprocess.py | 24 +-
 nipype/interfaces/fsl/tests/test_base.py | 8 +-
 nipype/interfaces/fsl/tests/test_dti.py | 24 +-
 nipype/interfaces/fsl/tests/test_maths.py | 28 +--
 .../interfaces/fsl/tests/test_preprocess.py | 44 ++--
 nipype/interfaces/fsl/tests/test_utils.py | 8 +-
 nipype/interfaces/fsl/utils.py | 36 +--
 nipype/interfaces/io.py | 18 +-
 nipype/interfaces/matlab.py | 20 +-
 nipype/interfaces/nipy/preprocess.py | 18 +-
 nipype/interfaces/nitime/analysis.py | 62 ++---
 nipype/interfaces/nitime/tests/test_nitime.py | 16 +-
 nipype/interfaces/rest.py | 8 +-
 nipype/interfaces/slicer.py | 62 ++---
 nipype/interfaces/spm/base.py | 64 ++---
 nipype/interfaces/spm/model.py | 88 +++----
 nipype/interfaces/spm/preprocess.py | 84 +++----
 nipype/interfaces/spm/tests/test_base.py | 8 +-
 nipype/interfaces/spm/tests/test_model.py | 6 +-
 .../interfaces/spm/tests/test_preprocess.py | 6 +-
 nipype/interfaces/spm/utils.py | 8 +-
 nipype/interfaces/tests/test_base.py | 18 +-
 nipype/interfaces/tests/test_io.py | 4 +-
 nipype/interfaces/tests/test_matlab.py | 12 +-
 nipype/interfaces/tests/test_utility.py | 6 +-
 nipype/interfaces/traits_extension.py | 8 +-
 nipype/interfaces/utility.py | 72 +++---
 nipype/pipeline/engine.py | 56 ++---
 nipype/pipeline/plugins/base.py | 4 +-
 nipype/pipeline/plugins/linear.py | 2 +-
 nipype/pipeline/plugins/multiproc.py | 2 +-
 nipype/pipeline/tests/test_engine.py | 9 +-
 nipype/pipeline/utils.py | 32 +--
 nipype/testing/__init__.py | 6 +-
 nipype/utils/docparse.py | 26 +-
 nipype/utils/filemanip.py | 2 +-
 nipype/utils/ipy_profile_nipype.py | 2 +-
 nipype/utils/matlabtools.py | 4 +-
 nipype/utils/misc.py | 10 +-
 nipype/utils/onetime.py | 4 +-
 nipype/utils/spm_docs.py | 4 +-
 nipype/utils/tests/test_filemanip.py | 2 +-
 nipype/workflows/freesurfer/utils.py | 2 +-
 nipype/workflows/fsl/dti.py | 140 +++++------
 nipype/workflows/fsl/preprocess.py | 26 +-
 nipype/workflows/fsl/tests/test_dti.py | 34 +--
 nipype/workflows/setup.py | 2 +-
 nipype/workflows/spm/preprocess.py | 22 +-
 nipype/workflows/spm/tests/__init__.py | 1 -
 setup.py | 12 +-
 setup_egg.py | 4 +-
 tools/apigen.py | 18 +-
 tools/gitwash_dumper.py | 2 +-
 tools/interfacedocgen.py | 46 ++--
 tools/nipype_nightly.py | 8 +-
 tools/report_coverage.py | 2 +-
 tools/run_examples.py | 2 +-
 tools/run_interface.py | 8 +-
 109 files changed, 1252 insertions(+), 1254 deletions(-)

diff --git a/Makefile b/Makefile
index 77e0759e0f..7e72de2a07 100644
--- a/Makefile
+++ b/Makefile
@@ -53,7 +53,8 @@ inplace:
 	$(PYTHON) setup.py build_ext -i
 
 test-code: in
-	$(NOSETESTS) -s nipype
+	$(NOSETESTS) -s nipype --with-doctest
+
 test-doc:
 	$(NOSETESTS) -s --with-doctest --doctest-tests --doctest-extension=rst \
 	--doctest-fixtures=_fixture doc/
@@ -62,5 +63,5 @@ test-coverage:
 	$(NOSETESTS) -s --with-coverage --cover-html --cover-html-dir=coverage \
 	--cover-package=nipype nipype
 
-test: test-code test-doc
+test: test-code
 
diff --git a/build_docs.py b/build_docs.py
index 9045a6e2e9..157e4775fc 100644
--- a/build_docs.py
+++ b/build_docs.py
@@ -25,7 +25,7 @@
 DOC_DOCTREES_DIR = os.path.join('doc', '_build', 'doctrees')
 
 ################################################################################
-# Distutils Command class for installing nipype to a temporary location.
+# Distutils Command class for installing nipype to a temporary location.
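Every hunk in this patch has the shape just shown: a line is deleted and immediately re-added, differing only in invisible trailing blanks. A sweep like this is normally scripted rather than edited by hand; the sketch below is one way to reproduce it (walking only nipype/ for brevity, and not necessarily how the author did it), and `git diff --check` is the usual way to confirm afterwards that no trailing whitespace remains::

    import os

    for dirpath, _, filenames in os.walk('nipype'):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            path = os.path.join(dirpath, filename)
            with open(path) as f:
                lines = f.readlines()
            # rstrip() drops trailing blanks together with the newline, so re-add it
            cleaned = [line.rstrip() + '\n' for line in lines]
            if cleaned != lines:
                with open(path, 'w') as f:
                    f.writelines(cleaned)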
class TempInstall(Command): temp_install_dir = os.path.join('build', 'install') @@ -34,11 +34,11 @@ def run(self): install = self.distribution.get_command_obj('install') install.install_scripts = self.temp_install_dir install.install_base = self.temp_install_dir - install.install_platlib = self.temp_install_dir - install.install_purelib = self.temp_install_dir - install.install_data = self.temp_install_dir - install.install_lib = self.temp_install_dir - install.install_headers = self.temp_install_dir + install.install_platlib = self.temp_install_dir + install.install_purelib = self.temp_install_dir + install.install_data = self.temp_install_dir + install.install_lib = self.temp_install_dir + install.install_headers = self.temp_install_dir install.run() # Horrible trick to reload nipype with our temporary instal @@ -52,13 +52,13 @@ def run(self): def initialize_options(self): pass - + def finalize_options(self): pass ################################################################################ -# Distutils Command class for API generation +# Distutils Command class for API generation class APIDocs(TempInstall): description = \ """generate API docs """ @@ -99,7 +99,7 @@ def relative_path(filename): ################################################################################ -# Distutils Command class build the docs +# Distutils Command class build the docs class MyBuildDoc(BuildDoc): """ Sub-class the standard sphinx documentation building system, to add logics for API generation and matplotlib's plot directive. @@ -121,7 +121,7 @@ def run(self): # in case I'm missing something? BuildDoc.run(self) self.zip_docs() - + def zip_docs(self): if not os.path.exists(DOC_BUILD_DIR): raise OSError, 'Doc directory does not exist.' @@ -131,19 +131,19 @@ def zip_docs(self): # for it. ZIP_STORED produces an uncompressed zip, but does not # require zlib. 
try: - zf = zipfile.ZipFile(target_file, 'w', + zf = zipfile.ZipFile(target_file, 'w', compression=zipfile.ZIP_DEFLATED) except RuntimeError: warnings.warn('zlib not installed, storing the docs ' 'without compression') - zf = zipfile.ZipFile(target_file, 'w', - compression=zipfile.ZIP_STORED) + zf = zipfile.ZipFile(target_file, 'w', + compression=zipfile.ZIP_STORED) for root, dirs, files in os.walk(DOC_BUILD_DIR): relative = relative_path(root) if not relative.startswith('.doctrees'): for f in files: - zf.write(os.path.join(root, f), + zf.write(os.path.join(root, f), os.path.join(relative, 'html_docs', f)) zf.close() @@ -171,7 +171,7 @@ def run(self): print "Removing %s" % interface_path shutil.rmtree(interface_path) if os.path.exists(DOC_BUILD_DIR): - print "Removing %s" % DOC_BUILD_DIR + print "Removing %s" % DOC_BUILD_DIR shutil.rmtree(DOC_BUILD_DIR) if os.path.exists(DOC_DOCTREES_DIR): print "Removing %s" % DOC_DOCTREES_DIR diff --git a/doc/sphinxext/docscrape.py b/doc/sphinxext/docscrape.py index d5187565fb..723aafac94 100644 --- a/doc/sphinxext/docscrape.py +++ b/doc/sphinxext/docscrape.py @@ -185,7 +185,7 @@ def _parse_param_list(self,content): return params - + _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) def _parse_see_also(self, content): @@ -218,7 +218,7 @@ def push_item(name, rest): current_func = None rest = [] - + for line in content: if not line.strip(): continue @@ -260,7 +260,7 @@ def strip_each_in(lst): if len(line) > 2: out[line[1]] = strip_each_in(line[2].split(',')) return out - + def _parse_summary(self): """Grab signature (if given) and summary""" if self._is_at_section(): @@ -277,7 +277,7 @@ def _parse_summary(self): if not self._is_at_section(): self['Extended Summary'] = self._read_to_next_section() - + def _parse(self): self._doc.reset() self._parse_summary() @@ -442,7 +442,7 @@ def get_func(self): else: func = self._f return func, func_name - + def __str__(self): out = '' diff --git a/doc/sphinxext/ipython_console_highlighting.py b/doc/sphinxext/ipython_console_highlighting.py index ec2cc1faaf..4c3eba885b 100644 --- a/doc/sphinxext/ipython_console_highlighting.py +++ b/doc/sphinxext/ipython_console_highlighting.py @@ -43,7 +43,7 @@ class IPythonConsoleLexer(Lexer): - It assumes the default IPython prompts, not customized ones. """ - + name = 'IPython console session' aliases = ['ipython'] mimetypes = ['text/x-ipython-console'] diff --git a/doc/sphinxext/numpydoc.py b/doc/sphinxext/numpydoc.py index 89a843684d..01a4337f69 100644 --- a/doc/sphinxext/numpydoc.py +++ b/doc/sphinxext/numpydoc.py @@ -90,7 +90,7 @@ def initialize(app): def setup(app, get_doc_object_=get_doc_object): global get_doc_object get_doc_object = get_doc_object_ - + app.connect('autodoc-process-docstring', mangle_docstrings) app.connect('builder-inited', initialize) app.add_config_value('numpydoc_edit_link', None, True) diff --git a/examples/camino_dti_tutorial.py b/examples/camino_dti_tutorial.py index 146b0781bc..2249accdc4 100644 --- a/examples/camino_dti_tutorial.py +++ b/examples/camino_dti_tutorial.py @@ -136,7 +136,7 @@ def get_affine(volume): """ In this tutorial we implement probabilistic tractography using the PICo algorithm. -PICo tractography requires an estimate of the fibre direction and a model of its +PICo tractography requires an estimate of the fibre direction and a model of its uncertainty in each voxel; this is produced using the following node. 
""" diff --git a/examples/connectivity_tutorial.py b/examples/connectivity_tutorial.py index a6503b4b34..0fcc26474b 100644 --- a/examples/connectivity_tutorial.py +++ b/examples/connectivity_tutorial.py @@ -29,7 +29,7 @@ These are written by Stephan Gerhard and can be obtained from: http://www.cmtk.org/ - + Or on github at: CFFlib: https://github.com/LTS5/cfflib diff --git a/examples/dartmouth_workshop_2010.py b/examples/dartmouth_workshop_2010.py index 8a14d844f6..860a258982 100644 --- a/examples/dartmouth_workshop_2010.py +++ b/examples/dartmouth_workshop_2010.py @@ -1,32 +1,32 @@ """ ================================= - Dartmouth College Workshop 2010 + Dartmouth College Workshop 2010 ================================= -First lets go to the directory with the data we'll be working on and start the interactive python interpreter -(with some nipype specific configuration). Note that nipype does not need to be run through ipython - it is +First lets go to the directory with the data we'll be working on and start the interactive python interpreter +(with some nipype specific configuration). Note that nipype does not need to be run through ipython - it is just much nicer to do interactive work in it. .. sourcecode:: bash - + cd $TDPATH ipython -p nipype - -For every neuroimaging procedure supported by nipype there exists a wrapper - a small piece of code managing + +For every neuroimaging procedure supported by nipype there exists a wrapper - a small piece of code managing the underlying software (FSL, SPM, AFNI etc.). We call those interfaces. They are standarised so we can hook them up together. Lets have a look at some of them. - + .. sourcecode:: ipython In [1]: import nipype.interfaces.fsl as fsl - + In [2]: fsl.BET.help() Inputs ------ - + Mandatory: in_file: input file to skull strip - + Optional: args: Additional parameters to the command center: center of gravity in voxels @@ -46,7 +46,7 @@ skull: create skull image threshold: apply thresholding to segmented brain image and mask vertical_gradient: vertical gradient in fractional intensity threshold (-1, 1) - + Outputs ------- mask_file: path/name of binary brain mask (if generated) @@ -59,16 +59,16 @@ In [4]: fs.Smooth.help() Inputs ------ - + Mandatory: in_file: source volume num_iters: number of iterations instead of fwhm mutually exclusive: surface_fwhm - reg_file: registers volume to surface anatomical + reg_file: registers volume to surface anatomical surface_fwhm: surface FWHM in mm mutually exclusive: num_iters requires: reg_file - + Optional: args: Additional parameters to the command environ: Environment variables (default={}) @@ -79,15 +79,15 @@ smoothed_file: output volume subjects_dir: subjects directory vol_fwhm: volumesmoothing outside of surface - + Outputs ------- args: Additional parameters to the command environ: Environment variables smoothed_file: smoothed input volume subjects_dir: subjects directory - -You can read about all of the interfaces implemented in nipype at our online documentation at http://nipy.sourceforge.net/nipype/documentation.html#documentation . + +You can read about all of the interfaces implemented in nipype at our online documentation at http://nipy.sourceforge.net/nipype/documentation.html#documentation . Check it out now. Using interfaces @@ -101,7 +101,7 @@ print result """ -Running a single program is not much of a breakthrough. Lets run motion correction followed by smoothing +Running a single program is not much of a breakthrough. 
Lets run motion correction followed by smoothing (isotropic - in other words not using SUSAN). Notice that in the first line we are setting the output data type for all FSL interfaces. """ @@ -132,7 +132,7 @@ motion_correct_and_smooth.base_dir = os.path.abspath('.') # define where will be the root folder for the workflow motion_correct_and_smooth.connect([ (motion_correct, smooth, [('out_file', 'in_file')]) - ]) + ]) # we are connecting 'out_file' output of motion_correct to 'in_file' input of smooth motion_correct_and_smooth.run() @@ -140,7 +140,7 @@ Another workflow ---------------- -Another example of a simple workflow (calculate the mean of fMRI signal and subtract it). +Another example of a simple workflow (calculate the mean of fMRI signal and subtract it). This time we'll be assigning inputs after defining the workflow. """ @@ -205,7 +205,7 @@ Datasink is a special interface for copying and arranging results. """ - + import nipype.interfaces.io as nio preproc.inputs.inputspec.func = os.path.abspath('data/s1/f3.nii') @@ -218,15 +218,15 @@ ('maskfunc3.out_file', 'funcruns')]) ]) preprocess.run() - + """ Datagrabber ----------- Datagrabber is (surprise, surprise) an interface for collecting files from hard drive. It is very flexible and -supports almost any file organisation of your data you can imagine. +supports almost any file organisation of your data you can imagine. """ - + datasource1 = nio.DataGrabber() datasource1.inputs.template = 'data/s1/f3.nii' results = datasource1.run() @@ -248,13 +248,13 @@ datasource4.inputs.run = [3, 7] datasource4.inputs.subject_id = ['s1', 's3'] results = datasource4.run() -print results.outputs +print results.outputs """ Iterables --------- -Iterables is a special field of the Node class that enables to iterate all workfloes/nodes connected to it over +Iterables is a special field of the Node class that enables to iterate all workfloes/nodes connected to it over some parameters. Here we'll use it to iterate over two subjects. 
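The assignment itself falls just outside this excerpt, but the pattern, as used in the resting-state and smoothing-comparison scripts elsewhere in this patch, looks schematically like this::

    import nipype.interfaces.utility as util
    import nipype.pipeline.engine as pe

    # an identity node whose subject_id input fans out over two values;
    # every node connected downstream of it is re-run once per subject
    infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                         name="infosource")
    infosource.iterables = ('subject_id', ['s1', 's3'])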
""" diff --git a/examples/dtk_dti_tutorial.py b/examples/dtk_dti_tutorial.py index ae80ac0318..566a51d80f 100644 --- a/examples/dtk_dti_tutorial.py +++ b/examples/dtk_dti_tutorial.py @@ -22,7 +22,7 @@ import nipype.interfaces.io as nio # Data i/o import nipype.interfaces.fsl as fsl # fsl import nipype.workflows.fsl as fsl_wf # fsl -import nipype.interfaces.diffusion_toolkit as dtk +import nipype.interfaces.diffusion_toolkit as dtk import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions @@ -102,7 +102,7 @@ datasource.inputs.template = "%s/%s" -# This needs to point to the fdt folder you can find after extracting +# This needs to point to the fdt folder you can find after extracting # http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz datasource.inputs.base_directory = os.path.abspath('fsl_course_data/fdt/') diff --git a/examples/dtk_odf_tutorial.py b/examples/dtk_odf_tutorial.py index bf46495d48..2de0b874bb 100644 --- a/examples/dtk_odf_tutorial.py +++ b/examples/dtk_odf_tutorial.py @@ -22,7 +22,7 @@ import nipype.interfaces.io as nio # Data i/o import nipype.interfaces.fsl as fsl # fsl import nipype.workflows.fsl as fsl_wf # fsl -import nipype.interfaces.diffusion_toolkit as dtk +import nipype.interfaces.diffusion_toolkit as dtk import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions @@ -102,7 +102,7 @@ datasource.inputs.template = "%s/%s" -# This needs to point to the fdt folder you can find after extracting +# This needs to point to the fdt folder you can find after extracting # http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz datasource.inputs.base_directory = os.path.abspath('data') diff --git a/examples/freesurfer_tutorial.py b/examples/freesurfer_tutorial.py index c57f79c8da..151391ea75 100644 --- a/examples/freesurfer_tutorial.py +++ b/examples/freesurfer_tutorial.py @@ -9,7 +9,7 @@ cortical data using FreeSurfer_ and then perform firstlevel model and contrast estimation using SPM_. A surface-based second level glm illustrates the use of spherical registration and freesurfer's glm -functions. +functions. Preparing environment ===================== @@ -24,15 +24,15 @@ Step 1 ------ Link the *fsaverage* directory for your freesurfer distribution. To do -this type: +this type: :: cd nipype-tutorial/fsdata ln -s $FREESURFER_HOME/subjects/fsaverage cd .. - - + + Defining the workflow ===================== @@ -60,7 +60,7 @@ These are currently being set at the class level, so every node will inherit these settings. However, these can also be changed or set for an individual -node. +node. """ # Tell freesurfer what subjects directory to use @@ -128,7 +128,7 @@ ApplyVolTransform = pe.Node(interface=fs.ApplyVolTransform(), name='applyreg') -ApplyVolTransform.inputs.inverse = True +ApplyVolTransform.inputs.inverse = True """ Use :class:`nipype.interfaces.freesurfer.Binarize` to extract a binary brain @@ -156,7 +156,7 @@ thickness of cortex with the smoothed data. The smoothing is not performed in a depth specific manner. The output of this branch should only be used for surface-based analysis and visualization. - + """ volsmooth = pe.Node(interface=spm.Smooth(), name = "volsmooth") @@ -506,7 +506,7 @@ def subjectinfo(subject_id): from nodes are piped into appropriate inputs of other nodes. Use the :class:`nipype.pipeline.engine.Workfow` to create a -graph-based execution pipeline for first level analysis. 
+graph-based execution pipeline for first level analysis. """ level1 = pe.Workflow(name="level1") diff --git a/examples/frontiers_paper/smoothing_comparison.py b/examples/frontiers_paper/smoothing_comparison.py index fee556f43a..8d70aaa789 100644 --- a/examples/frontiers_paper/smoothing_comparison.py +++ b/examples/frontiers_paper/smoothing_comparison.py @@ -18,36 +18,36 @@ preprocessing = pe.Workflow(name="preprocessing") -iter_fwhm = pe.Node(interface=util.IdentityInterface(fields=["fwhm"]), +iter_fwhm = pe.Node(interface=util.IdentityInterface(fields=["fwhm"]), name="iter_fwhm") iter_fwhm.iterables = [('fwhm', [4, 8])] -iter_smoothing_method = pe.Node(interface=util.IdentityInterface(fields=["smoothing_method"]), +iter_smoothing_method = pe.Node(interface=util.IdentityInterface(fields=["smoothing_method"]), name="iter_smoothing_method") -iter_smoothing_method.iterables = [('smoothing_method',['isotropic_voxel', - 'anisotropic_voxel', +iter_smoothing_method.iterables = [('smoothing_method',['isotropic_voxel', + 'anisotropic_voxel', 'isotropic_surface'])] realign = pe.Node(interface=spm.Realign(), name="realign") realign.inputs.register_to_mean = True -isotropic_voxel_smooth = pe.Node(interface=spm.Smooth(), +isotropic_voxel_smooth = pe.Node(interface=spm.Smooth(), name="isotropic_voxel_smooth") -preprocessing.connect(realign, "realigned_files", isotropic_voxel_smooth, +preprocessing.connect(realign, "realigned_files", isotropic_voxel_smooth, "in_files") preprocessing.connect(iter_fwhm, "fwhm", isotropic_voxel_smooth, "fwhm") compute_mask = pe.Node(interface=nipy.ComputeMask(), name="compute_mask") preprocessing.connect(realign, "mean_image", compute_mask, "mean_volume") -anisotropic_voxel_smooth = fsl_wf.create_susan_smooth(name="anisotropic_voxel_smooth", +anisotropic_voxel_smooth = fsl_wf.create_susan_smooth(name="anisotropic_voxel_smooth", separate_masks=False) anisotropic_voxel_smooth.inputs.smooth.output_type = 'NIFTI' -preprocessing.connect(realign, "realigned_files", anisotropic_voxel_smooth, +preprocessing.connect(realign, "realigned_files", anisotropic_voxel_smooth, "inputnode.in_files") -preprocessing.connect(iter_fwhm, "fwhm", anisotropic_voxel_smooth, +preprocessing.connect(iter_fwhm, "fwhm", anisotropic_voxel_smooth, "inputnode.fwhm") -preprocessing.connect(compute_mask, "brain_mask", anisotropic_voxel_smooth, +preprocessing.connect(compute_mask, "brain_mask", anisotropic_voxel_smooth, 'inputnode.mask_file') @@ -64,38 +64,38 @@ isotropic_surface_smooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0,1,0.1)), iterfield=['in_file'], name="isotropic_surface_smooth") -preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth, +preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth, 'reg_file') -preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth, +preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth, "in_file") -preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth, +preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth, "surface_fwhm") preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth, "vol_fwhm") -preprocessing.connect(recon_all, 'subjects_dir', isotropic_surface_smooth, +preprocessing.connect(recon_all, 'subjects_dir', isotropic_surface_smooth, 'subjects_dir') merge_smoothed_files = pe.Node(interface=util.Merge(3), name='merge_smoothed_files') -preprocessing.connect(isotropic_voxel_smooth, 'smoothed_files', 
+preprocessing.connect(isotropic_voxel_smooth, 'smoothed_files', merge_smoothed_files, 'in1') -preprocessing.connect(anisotropic_voxel_smooth, 'outputnode.smoothed_files', +preprocessing.connect(anisotropic_voxel_smooth, 'outputnode.smoothed_files', merge_smoothed_files, 'in2') -preprocessing.connect(isotropic_surface_smooth, 'smoothed_file', +preprocessing.connect(isotropic_surface_smooth, 'smoothed_file', merge_smoothed_files, 'in3') - -select_smoothed_files = pe.Node(interface=util.Select(), + +select_smoothed_files = pe.Node(interface=util.Select(), name="select_smoothed_files") -preprocessing.connect(merge_smoothed_files, 'out', select_smoothed_files, +preprocessing.connect(merge_smoothed_files, 'out', select_smoothed_files, 'inlist') - + def chooseindex(roi): - return {'isotropic_voxel':range(0,4), 'anisotropic_voxel':range(4,8), + return {'isotropic_voxel':range(0,4), 'anisotropic_voxel':range(4,8), 'isotropic_surface':range(8,12)}[roi] - -preprocessing.connect(iter_smoothing_method, ("smoothing_method", chooseindex), + +preprocessing.connect(iter_smoothing_method, ("smoothing_method", chooseindex), select_smoothed_files, 'index') -rename = pe.MapNode(util.Rename(format_string="%(orig)s"), name="rename", +rename = pe.MapNode(util.Rename(format_string="%(orig)s"), name="rename", iterfield=['in_file']) rename.inputs.parse_string = "(?P.*)" @@ -118,9 +118,9 @@ def chooseindex(roi): level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical' : 1} -contrastestimate = pe.Node(interface = spm.EstimateContrast(), +contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") -contrastestimate.inputs.contrasts = [('Task>Baseline','T', +contrastestimate.inputs.contrasts = [('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5])] modelling = pe.Workflow(name="modelling") @@ -134,11 +134,11 @@ def chooseindex(roi): main_workflow = pe.Workflow(name="main_workflow") main_workflow.base_dir = "smoothing_comparison_workflow" -main_workflow.connect(preprocessing, "realign.realignment_parameters", +main_workflow.connect(preprocessing, "realign.realignment_parameters", modelling, "specify_model.realignment_parameters") -main_workflow.connect(preprocessing, "select_smoothed_files.out", +main_workflow.connect(preprocessing, "select_smoothed_files.out", modelling, "specify_model.functional_runs") -main_workflow.connect(preprocessing, "compute_mask.brain_mask", +main_workflow.connect(preprocessing, "compute_mask.brain_mask", modelling, "level1design.mask_image") datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], @@ -146,22 +146,22 @@ def chooseindex(roi): name = 'datasource') datasource.inputs.base_directory = os.path.abspath('data') datasource.inputs.template = '%s/%s.nii' -datasource.inputs.template_args = info = dict(func=[['subject_id', +datasource.inputs.template_args = info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], struct=[['subject_id','struct']]) datasource.inputs.subject_id = 's1' main_workflow.connect(datasource, 'func', preprocessing, 'realign.in_files') -main_workflow.connect(datasource, 'struct', preprocessing, +main_workflow.connect(datasource, 'struct', preprocessing, 'recon_all.T1_files') datasink = pe.Node(interface=nio.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.abspath('smoothing_comparison_workflow/output') datasink.inputs.regexp_substitutions = [("_rename[0-9]", "")] -main_workflow.connect(modelling, 
'contrastestimate.spmT_images', datasink, +main_workflow.connect(modelling, 'contrastestimate.spmT_images', datasink, 'contrasts') -main_workflow.connect(preprocessing, 'rename.out_file', datasink, +main_workflow.connect(preprocessing, 'rename.out_file', datasink, 'smoothed_epi') main_workflow.run() diff --git a/examples/frontiers_paper/workflow_from_scratch.py b/examples/frontiers_paper/workflow_from_scratch.py index 8bd93688cc..cdb1226a83 100644 --- a/examples/frontiers_paper/workflow_from_scratch.py +++ b/examples/frontiers_paper/workflow_from_scratch.py @@ -13,23 +13,23 @@ import os # system functions -"""In the following section, to showcase NiPyPe, we will describe how to create -and extend a typical fMRI processing pipeline. We will begin with a basic -processing layout and follow with extending it by adding/exchanging different +"""In the following section, to showcase NiPyPe, we will describe how to create +and extend a typical fMRI processing pipeline. We will begin with a basic +processing layout and follow with extending it by adding/exchanging different components. -Most fMRI pipeline can be divided into two sections - preprocessing and -modelling. First one deals with cleaning data from confounds and noise and the -second one fits a model based on the experimental design. Preprocessing stage -in our first iteration of a pipeline will consist of only two steps: -realignment and smoothing. In NiPyPe Every processing step consist of an -Interface (which defines how to execute corresponding software) encapsulated -in a Node (which defines for example a unique name). For realignment (motion -correction achieved by coregistering all volumes to the mean) and smoothing -(convolution with 3D Gaussian kernel) we will use SPM implementation. -Definition of appropriate nodes can be found in Listing 1 (TODO). Inputs -(such as register_to_mean from listing 1) of nodes are accessible through the -inputs property. Upon setting any input its type is verified to avoid errors +Most fMRI pipeline can be divided into two sections - preprocessing and +modelling. First one deals with cleaning data from confounds and noise and the +second one fits a model based on the experimental design. Preprocessing stage +in our first iteration of a pipeline will consist of only two steps: +realignment and smoothing. In NiPyPe Every processing step consist of an +Interface (which defines how to execute corresponding software) encapsulated +in a Node (which defines for example a unique name). For realignment (motion +correction achieved by coregistering all volumes to the mean) and smoothing +(convolution with 3D Gaussian kernel) we will use SPM implementation. +Definition of appropriate nodes can be found in Listing 1 (TODO). Inputs +(such as register_to_mean from listing 1) of nodes are accessible through the +inputs property. Upon setting any input its type is verified to avoid errors during the execution.""" realign = pe.Node(interface=spm.Realign(), name="realign") @@ -38,24 +38,24 @@ smooth = pe.Node(interface=spm.Smooth(), name="smooth") smooth.inputs.fwhm = 4 -"""To connect two nodes a Workflow has to be created. connect() method of a -Workflow allows to specify which outputs of which Nodes should be connected to -which inputs of which Nodes (see Listing 2). By connecting realigned_files -output of realign to in_files input of Smooth we have created a simple +"""To connect two nodes a Workflow has to be created. 
connect() method of a +Workflow allows to specify which outputs of which Nodes should be connected to +which inputs of which Nodes (see Listing 2). By connecting realigned_files +output of realign to in_files input of Smooth we have created a simple preprocessing workflow (see Figure TODO).""" preprocessing = pe.Workflow(name="preprocessing") preprocessing.connect(realign, "realigned_files", smooth, "in_files") -"""Creating a modelling workflow which will define the design, estimate model -and contrasts follows the same suite. We will again use SPM implementations. -NiPyPe, however, adds extra abstraction layer to model definition which allows -using the same definition for many model estimation implemantations (for -example one from FSL or nippy). Therefore we will need four nodes: -SpecifyModel (NiPyPe specific abstraction layer), Level1Design (SPM design -definition), ModelEstimate, and ContrastEstimate. The connected modelling -Workflow can be seen on Figure TODO. Model specification supports block, event -and sparse designs. Contrasts provided to ContrastEstimate are defined using +"""Creating a modelling workflow which will define the design, estimate model +and contrasts follows the same suite. We will again use SPM implementations. +NiPyPe, however, adds extra abstraction layer to model definition which allows +using the same definition for many model estimation implemantations (for +example one from FSL or nippy). Therefore we will need four nodes: +SpecifyModel (NiPyPe specific abstraction layer), Level1Design (SPM design +definition), ModelEstimate, and ContrastEstimate. The connected modelling +Workflow can be seen on Figure TODO. Model specification supports block, event +and sparse designs. Contrasts provided to ContrastEstimate are defined using the same names of regressors as defined in the SpecifyModel.""" specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model") @@ -75,7 +75,7 @@ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical' : 1} -contrastestimate = pe.Node(interface = spm.EstimateContrast(), +contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) @@ -84,33 +84,33 @@ modelling = pe.Workflow(name="modelling") modelling.connect(specify_model, 'session_info', level1design, 'session_info') modelling.connect(level1design, 'spm_mat_file', level1estimate, 'spm_mat_file') -modelling.connect(level1estimate,'spm_mat_file', +modelling.connect(level1estimate,'spm_mat_file', contrastestimate,'spm_mat_file') modelling.connect(level1estimate,'beta_images', contrastestimate,'beta_images') -modelling.connect(level1estimate,'residual_image', +modelling.connect(level1estimate,'residual_image', contrastestimate,'residual_image') -"""Having preprocessing and modelling workflows we need to connect them -together, add data grabbing facility and save the results. For this we will -create a master Workflow which will host preprocessing and model Workflows as -well as DataGrabber and DataSink Nodes. NiPyPe allows connecting Nodes between -Workflows. We will use this feature to connect realignment_parameters and +"""Having preprocessing and modelling workflows we need to connect them +together, add data grabbing facility and save the results. 
For this we will +create a master Workflow which will host preprocessing and model Workflows as +well as DataGrabber and DataSink Nodes. NiPyPe allows connecting Nodes between +Workflows. We will use this feature to connect realignment_parameters and smoothed_files to modelling workflow.""" main_workflow = pe.Workflow(name="main_workflow") main_workflow.base_dir = "workflow_from_scratch" -main_workflow.connect(preprocessing, "realign.realignment_parameters", +main_workflow.connect(preprocessing, "realign.realignment_parameters", modelling, "specify_model.realignment_parameters") -main_workflow.connect(preprocessing, "smooth.smoothed_files", +main_workflow.connect(preprocessing, "smooth.smoothed_files", modelling, "specify_model.functional_runs") -"""DataGrabber allows to define flexible search patterns which can be -parameterized by user defined inputs (such as subject ID, session etc.). -This allows to adapt to a wide range of file layouts. In our case we will -parameterize it with subject ID. In this way we will be able to run it for -different subjects. We can automate this by iterating over a list of subject -Ids, by setting an iterables property on the subject_id input of DataGrabber. +"""DataGrabber allows to define flexible search patterns which can be +parameterized by user defined inputs (such as subject ID, session etc.). +This allows to adapt to a wide range of file layouts. In our case we will +parameterize it with subject ID. In this way we will be able to run it for +different subjects. We can automate this by iterating over a list of subject +Ids, by setting an iterables property on the subject_id input of DataGrabber. Its output will be connected to realignment node from preprocessing workflow.""" datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], @@ -118,20 +118,20 @@ name = 'datasource') datasource.inputs.base_directory = os.path.abspath('data') datasource.inputs.template = '%s/%s.nii' -datasource.inputs.template_args = dict(func=[['subject_id', +datasource.inputs.template_args = dict(func=[['subject_id', ['f3','f5','f7','f10']]]) datasource.inputs.subject_id = 's1' main_workflow.connect(datasource, 'func', preprocessing, 'realign.in_files') -"""DataSink on the other side provides means to storing selected results to a -specified location. It supports automatic creation of folder stricter and +"""DataSink on the other side provides means to storing selected results to a +specified location. It supports automatic creation of folder stricter and regular expression based substitutions. 
In this example we will store T maps.""" datasink = pe.Node(interface=nio.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.abspath('workflow_from_scratch/output') -main_workflow.connect(modelling, 'contrastestimate.spmT_images', +main_workflow.connect(modelling, 'contrastestimate.spmT_images', datasink, 'contrasts.@T') main_workflow.run() diff --git a/examples/fsl_dti_tutorial.py b/examples/fsl_dti_tutorial.py index f0e22f27e2..779dfb6e47 100644 --- a/examples/fsl_dti_tutorial.py +++ b/examples/fsl_dti_tutorial.py @@ -110,7 +110,7 @@ datasource.inputs.template = "%s/%s" -# This needs to point to the fdt folder you can find after extracting +# This needs to point to the fdt folder you can find after extracting # http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz datasource.inputs.base_directory = os.path.abspath('fsl_course_data/fdt/') diff --git a/examples/fsl_feeds_tutorial.py b/examples/fsl_feeds_tutorial.py index a18805e07d..42e8135817 100644 --- a/examples/fsl_feeds_tutorial.py +++ b/examples/fsl_feeds_tutorial.py @@ -221,7 +221,7 @@ def getthreshop(thresh): preproc.connect(meanfunc2,'out_file', mergenode, 'in1') preproc.connect(medianval,'out_stat', mergenode, 'in2') - + """ Smooth each run using SUSAN with the brightness threshold set to 75% of the median value for each run and a mask consituting the mean functional diff --git a/examples/fsl_resting_compcorr.py b/examples/fsl_resting_compcorr.py index 386dee9205..f8de2be66c 100644 --- a/examples/fsl_resting_compcorr.py +++ b/examples/fsl_resting_compcorr.py @@ -40,7 +40,7 @@ infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") -"""Here we set up iteration over all the subjects. +"""Here we set up iteration over all the subjects. """ infosource.iterables = ('subject_id', subject_list) diff --git a/examples/nipy_tutorial.py b/examples/nipy_tutorial.py index 2548a70134..30163d82bf 100644 --- a/examples/nipy_tutorial.py +++ b/examples/nipy_tutorial.py @@ -6,7 +6,7 @@ ==================================== -The nipy_tutorial.py integrates several interfaces to perform a first level +The nipy_tutorial.py integrates several interfaces to perform a first level analysis on a two-subject data set. It is very similar to the spm_tutorial with the difference of using nipy for fitting GLM model and estimating contrasts. The tutorial can @@ -184,7 +184,7 @@ def subjectinfo(subject_id): contrasts = [cont1,cont2] """Generate design information using -:class:`nipype.interfaces.spm.SpecifyModel`. nipy accepts only design specified +:class:`nipype.interfaces.spm.SpecifyModel`. nipy accepts only design specified in seconds so "output_units" has always have to be set to "secs". 
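A minimal sketch of that setting, using the SpecifyModel units fields described in nipype/algorithms/modelgen.py later in this patch (the exact input spec is an assumption)::

    import nipype.algorithms.modelgen as model
    import nipype.pipeline.engine as pe

    specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model")
    specify_model.inputs.input_units = 'secs'   # onsets/durations already in seconds
    specify_model.inputs.output_units = 'secs'  # nipy's model fitting requires seconds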
""" diff --git a/examples/slicer_tutorial.py b/examples/slicer_tutorial.py index ca447e8f21..68d6c6d588 100644 --- a/examples/slicer_tutorial.py +++ b/examples/slicer_tutorial.py @@ -118,6 +118,6 @@ (datasource,reslice,[('func','inputVolume')]), (datasource,reslice,[('struct','referenceVolume')]) ]) - + pipeline.run() pipeline.write_graph() diff --git a/examples/spm_dartel_tutorial.py b/examples/spm_dartel_tutorial.py index 0c3fe74023..b3e120cded 100644 --- a/examples/spm_dartel_tutorial.py +++ b/examples/spm_dartel_tutorial.py @@ -269,7 +269,7 @@ datasource_dartel = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct']), - name = 'datasource_dartel', + name = 'datasource_dartel', iterfield = ['subject_id']) datasource_dartel.inputs.base_directory = data_dir datasource_dartel.inputs.template = '%s/%s.nii' @@ -281,7 +281,7 @@ This way we will be able to pick the right field flows later. """ -rename_dartel = pe.MapNode(util.Rename(format_string="subject_id_%(subject_id)s_struct"), +rename_dartel = pe.MapNode(util.Rename(format_string="subject_id_%(subject_id)s_struct"), iterfield=['in_file', 'subject_id'], name = 'rename_dartel') rename_dartel.inputs.subject_id = subject_list @@ -299,9 +299,9 @@ def pickFieldFlow(dartel_flow_fields, subject_id): _, name, _ = split_filename(f) if name.find("subject_id_%s"%subject_id): return f - + raise Exception - + pick_flow = pe.Node(util.Function(input_names=['dartel_flow_fields', 'subject_id'], output_names=['dartel_flow_field'], function = pickFieldFlow), name = "pick_flow") @@ -392,7 +392,7 @@ def subjectinfo(subject_id): level1.connect([(datasource_dartel, rename_dartel, [('struct', 'in_file')]), (rename_dartel, dartel_workflow, [('out_file','inputspec.structural_files')]), - + (infosource, datasource, [('subject_id', 'subject_id')]), (datasource,l1pipeline,[('func','preproc.realign.in_files'), ('struct', 'preproc.coregister.target'), @@ -519,5 +519,5 @@ def getstripdir(subject_id): if __name__ == '__main__': l2pipeline.run() - - + + diff --git a/examples/tbss_tutorial.py b/examples/tbss_tutorial.py index cb63234af3..3e707478ab 100644 --- a/examples/tbss_tutorial.py +++ b/examples/tbss_tutorial.py @@ -1,6 +1,6 @@ """ XXX Currently not WORKING in release 0.3, - Check for updates + Check for updates A pipeline example that uses several interfaces to perform analysis on diffusion weighted images using diff --git a/nipype/__init__.py b/nipype/__init__.py index 690e1165b8..68c5721973 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -32,7 +32,7 @@ b. execute workflows in parallel using IPython's parallel computing interface - + c. tools for interfacing databases, repositories d. tools for provenance tracking diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 716b8f8dee..b450cc5706 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -34,7 +34,7 @@ class PickAtlasInputSpec(BaseInterfaceInputSpec): atlas = File(exists=True, desc="Location of the atlas that will be used.", mandatory=True) - labels = traits.Either(traits.Int, traits.List(traits.Int), + labels = traits.Either(traits.Int, traits.List(traits.Int), desc="Labels of regions that will be included in the mask. 
Must be \ compatible with the atlas used.", compulsory=True) hemi = traits.Enum('both','left','right', desc="Restrict the mask to only one hemisphere: left or right", usedefault=True) @@ -55,7 +55,7 @@ class PickAtlas(BaseInterface): def _run_interface(self, runtime): nim = self._get_brodmann_area() nb.save(nim, self._gen_output_filename()) - + return runtime def _gen_output_filename(self): @@ -65,12 +65,12 @@ def _gen_output_filename(self): else: output = os.path.realpath(self.inputs.output_file) return output - + def _get_brodmann_area(self): nii = nb.load(self.inputs.atlas) origdata = nii.get_data() newdata = np.zeros(origdata.shape) - + if not isinstance(self.inputs.labels, list): labels = [self.inputs.labels] else: @@ -93,36 +93,36 @@ def _list_outputs(self): outputs = self._outputs().get() outputs['mask_file'] = self._gen_output_filename() return outputs - + class SimpleThresholdInputSpec(BaseInterfaceInputSpec): volumes = InputMultiPath(File(exists=True), desc='volumes to be thresholded', mandatory=True) threshold = traits.Float(desc='volumes to be thresholdedeverything below this value will be set to zero', mandatory=True) - - + + class SimpleThresholdOutputSpec(TraitedSpec): thresholded_volumes = OutputMultiPath(File(exists=True), desc="thresholded volumes") - + class SimpleThreshold(BaseInterface): input_spec = SimpleThresholdInputSpec output_spec = SimpleThresholdOutputSpec - + def _run_interface(self, runtime): for fname in self.inputs.volumes: img = nb.load(fname) data = np.array(img.get_data()) - + active_map = data > self.inputs.threshold - + thresholded_map = np.zeros(data.shape) thresholded_map[active_map] = data[active_map] new_img = nb.Nifti1Image(thresholded_map, img.get_affine(), img.get_header()) _, base, _ = split_filename(fname) - nb.save(new_img, base + '_thresholded.nii') - + nb.save(new_img, base + '_thresholded.nii') + return runtime - + def _list_outputs(self): outputs = self._outputs().get() outputs["thresholded_volumes"] = [] @@ -134,32 +134,32 @@ def _list_outputs(self): class ModifyAffineInputSpec(BaseInterfaceInputSpec): volumes = InputMultiPath(File(exists=True), desc='volumes which affine matrices will be modified', mandatory=True) transformation_matrix = traits.Array(value=np.eye(4), shape=(4,4), desc="transformation matrix that will be left multiplied by the affine matrix", usedefault=True) - + class ModifyAffineOutputSpec(TraitedSpec): transformed_volumes = OutputMultiPath(File(exist=True)) - + class ModifyAffine(BaseInterface): ''' Left multiplies the affine matrix with a specified values. Saves the volume as a nifti file. ''' input_spec = ModifyAffineInputSpec output_spec = ModifyAffineOutputSpec - + def _gen_output_filename(self, name): _, base, _ = split_filename(name) return os.path.abspath(base + "_transformed.nii") - + def _run_interface(self, runtime): for fname in self.inputs.volumes: img = nb.load(fname) - + affine = img.get_affine() affine = np.dot(self.inputs.transformation_matrix,affine) nb.save(nb.Nifti1Image(img.get_data(), affine, img.get_header()), self._gen_output_filename(fname)) - + return runtime - + def _list_outputs(self): outputs = self._outputs().get() outputs['transformed_volumes'] = [] @@ -180,21 +180,21 @@ class DistanceOutputSpec(TraitedSpec): point1 = traits.Array(shape=(3,)) point2 = traits.Array(shape=(3,)) histogram = File() - + class Distance(BaseInterface): ''' Calculates distance between two volumes. 
''' input_spec = DistanceInputSpec output_spec = DistanceOutputSpec - + _hist_filename = "hist.pdf" - + def _find_border(self,data): eroded = binary_erosion(data) border = np.logical_and(data, np.logical_not(eroded)) return border - + def _get_coordinates(self, data, affine): if len(data.shape) == 4: data = data[:,:,:,0] @@ -202,51 +202,51 @@ def _get_coordinates(self, data, affine): indices = np.vstack((indices, np.ones(indices.shape[1]))) coordinates = np.dot(affine,indices) return coordinates[:3,:] - + def _eucl_min(self, nii1, nii2): origdata1 = nii1.get_data().astype(np.bool) border1 = self._find_border(origdata1) - + origdata2 = nii2.get_data().astype(np.bool) border2 = self._find_border(origdata2) - + set1_coordinates = self._get_coordinates(border1, nii1.get_affine()) - + set2_coordinates = self._get_coordinates(border2, nii2.get_affine()) - + dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) (point1, point2) = np.unravel_index(np.argmin(dist_matrix), dist_matrix.shape) return (euclidean(set1_coordinates.T[point1,:], set2_coordinates.T[point2,:]), set1_coordinates.T[point1,:], set2_coordinates.T[point2,:]) - + def _eucl_cog(self, nii1, nii2): - origdata1 = nii1.get_data().astype(np.bool) + origdata1 = nii1.get_data().astype(np.bool) cog_t = np.array(center_of_mass(origdata1)).reshape(-1,1) cog_t = np.vstack((cog_t, np.array([1]))) cog_t_coor = np.dot(nii1.get_affine(),cog_t)[:3,:] - + origdata2 = nii2.get_data().astype(np.bool) (labeled_data, n_labels) = label(origdata2) - + cogs = np.ones((4,n_labels)) - + for i in range(n_labels): cogs[:3,i] = np.array(center_of_mass(origdata2, labeled_data, i+1)) - + cogs_coor = np.dot(nii2.get_affine(),cogs)[:3,:] - + dist_matrix = cdist(cog_t_coor.T, cogs_coor.T) - + return np.mean(dist_matrix) - + def _eucl_mean(self, nii1, nii2, weighted=False): origdata1 = nii1.get_data().astype(np.bool) border1 = self._find_border(origdata1) - + origdata2 = nii2.get_data().astype(np.bool) - - set1_coordinates = self._get_coordinates(border1, nii1.get_affine()) + + set1_coordinates = self._get_coordinates(border1, nii1.get_affine()) set2_coordinates = self._get_coordinates(origdata2, nii2.get_affine()) - + dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) min_dist_matrix = np.amin(dist_matrix, axis = 0) plt.figure() @@ -254,27 +254,27 @@ def _eucl_mean(self, nii1, nii2, weighted=False): plt.savefig(self._hist_filename) plt.clf() plt.close() - + if weighted: return np.average(min_dist_matrix, weights=nii2.get_data()[origdata2].flat) else: return np.mean(min_dist_matrix) - - + + def _run_interface(self, runtime): nii1 = nb.load(self.inputs.volume1) nii2 = nb.load(self.inputs.volume2) - + if self.inputs.method == "eucl_min": self._distance, self._point1, self._point2 = self._eucl_min(nii1, nii2) - + elif self.inputs.method == "eucl_cog": self._distance = self._eucl_cog(nii1, nii2) - + elif self.inputs.method == "eucl_mean": - self._distance = self._eucl_mean(nii1, nii2) - + self._distance = self._eucl_mean(nii1, nii2) + elif self.inputs.method == "eucl_wmean": self._distance = self._eucl_mean(nii1, nii2, weighted=True) @@ -288,22 +288,22 @@ def _list_outputs(self): elif self.inputs.method in ["eucl_mean", "eucl_wmean"]: outputs['histogram'] = os.path.abspath(self._hist_filename) return outputs - + class OverlapInputSpec(BaseInterfaceInputSpec): volume1 = File(exists=True, mandatory=True, desc="Has to have the same dimensions as volume2.") volume2 = File(exists=True, mandatory=True, desc="Has to have the same dimensions as volume1.") 
out_file = File("diff.nii", usedefault=True) - + class OverlapOutputSpec(TraitedSpec): jaccard = traits.Float() dice = traits.Float() volume_difference = traits.Int() diff_file = File(exists=True) - + class Overlap(BaseInterface): """ Calculates various overlap measures between two maps. - + Example ------- @@ -312,36 +312,36 @@ class Overlap(BaseInterface): >>> overlap.inputs.volume1 = 'cont2.nii' >>> res = overlap.run() # doctest: +SKIP """ - + input_spec = OverlapInputSpec output_spec = OverlapOutputSpec - + def _bool_vec_dissimilarity(self, booldata1, booldata2, method): methods = {"dice": dice, "jaccard": jaccard} if not (np.any(booldata1) or np.any(booldata2)): return 0 return 1 - methods[method](booldata1.flat, booldata2.flat) - + def _run_interface(self, runtime): nii1 = nb.load(self.inputs.volume1) nii2 = nb.load(self.inputs.volume2) - + origdata1 = np.logical_not(np.logical_or(nii1.get_data() == 0, np.isnan(nii1.get_data()))) origdata2 = np.logical_not(np.logical_or(nii2.get_data() == 0, np.isnan(nii2.get_data()))) for method in ("dice", "jaccard"): setattr(self, '_' + method, self._bool_vec_dissimilarity(origdata1, origdata2, method = method)) - + self._volume = int(origdata1.sum() - origdata2.sum()) - + both_data = np.zeros(origdata1.shape) both_data[origdata1] = 1 both_data[origdata2] += 2 - + nb.save(nb.Nifti1Image(both_data, nii1.get_affine(), nii1.get_header()), self.inputs.out_file) - + return runtime - + def _list_outputs(self): outputs = self._outputs().get() for method in ("dice", "jaccard"): @@ -349,37 +349,37 @@ def _list_outputs(self): outputs['volume_difference'] = self._volume outputs['diff_file'] = os.path.abspath(self.inputs.out_file) return outputs - + class CreateNiftiInputSpec(BaseInterfaceInputSpec): data_file = File(exists=True, mandatory=True, desc="ANALYZE img file") header_file = File(exists=True, mandatory=True, desc="corresponding ANALYZE hdr file") affine = traits.Array(exists=True, desc="affine transformation array") - + class CreateNiftiOutputSpec(TraitedSpec): nifti_file = File(exists=True) class CreateNifti(BaseInterface): input_spec = CreateNiftiInputSpec output_spec = CreateNiftiOutputSpec - + def _gen_output_file_name(self): _, base, _ = split_filename(self.inputs.data_file) return os.path.abspath(base + ".nii") - + def _run_interface(self, runtime): hdr = nb.AnalyzeHeader.from_fileobj(open(self.inputs.header_file, 'rb')) - + if isdefined(self.inputs.affine): affine = self.inputs.affine else: affine = None - + data = hdr.data_from_fileobj(open(self.inputs.data_file, 'rb')) img = nb.Nifti1Image(data, affine, hdr) nb.save(img, self._gen_output_file_name()) - + return runtime - + def _list_outputs(self): outputs = self._outputs().get() outputs['nifti_file'] = self._gen_output_file_name() @@ -401,7 +401,7 @@ class TSNR(BaseInterface): """Computes the time-course SNR for a time series Typically you want to run this on a realigned time-series. 
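The quantity computed is the voxelwise temporal SNR: the mean of each voxel's time course divided by its standard deviation over time (before any detrending). In numpy terms, roughly (the filename is hypothetical)::

    import numpy as np
    import nibabel as nb

    img = nb.load('realigned.nii')                 # hypothetical 4D time series
    data = img.get_data().astype(np.float64)
    mean = data.mean(axis=-1)                      # voxelwise mean over time
    std = data.std(axis=-1)                        # voxelwise std over time
    tsnr = np.zeros_like(mean)
    tsnr[std > 0] = mean[std > 0] / std[std > 0]   # avoid division by zero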
- + Example ------- diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index d51396bad9..408a1db56d 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -15,7 +15,7 @@ >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) - + """ from copy import deepcopy @@ -43,7 +43,7 @@ def gcd(a, b): 4 >>> gcd(22,55) 11 - + """ while b > 0: a,b = b, a%b return a @@ -78,7 +78,7 @@ def spm_hrf(RT, P=None, fMRI_T=16): -3.73060781e-02 -3.08373716e-02 -2.05161334e-02 -1.16441637e-02 -5.82063147e-03 -2.61854250e-03 -1.07732374e-03 -4.10443522e-04 -1.46257507e-04] - + """ p = np.array([6,16,1,1,6,0,32],dtype=float) if P is not None: @@ -121,7 +121,7 @@ def scale_timings(timelist, input_units, output_units, time_repetition): input_units: 'secs' or 'scans' output_units: Ibid. time_repetition: float in seconds - + """ if input_units==output_units: _scalefactor = 1. @@ -206,20 +206,20 @@ class SpecifyModel(BaseInterface): ~~~~~~~~ - regressor_names : list of str - list of names corresponding to each column. Should be None if + list of names corresponding to each column. Should be None if automatically assigned. - regressors : list of lists values for each regressor - must correspond to the number of volumes in the functional run - - amplitudes : lists of amplitudes for each event. This will be ignored by + - amplitudes : lists of amplitudes for each event. This will be ignored by SPM's Level1Design. - The following two (tmod, pmod) will be ignored by any Level1Design class + The following two (tmod, pmod) will be ignored by any Level1Design class other than SPM: - - tmod : lists of conditions that should be temporally modulated. Should + - tmod : lists of conditions that should be temporally modulated. Should default to None if not being used. - pmod : list of Bunch corresponding to conditions @@ -251,7 +251,7 @@ class SpecifyModel(BaseInterface): >>> s.inputs.subject_info = info Using pmod: - + >>> info = [Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 50],[100, 180]], durations=[[0],[0]], pmod=[Bunch(name=['amp'],poly=[2],param=[[1,2]]), None]), \ Bunch(conditions=['cond1', 'cond2'], onsets=[[20, 120],[80, 160]], durations=[[0],[0]], pmod=[Bunch(name=['amp'],poly=[2],param=[[1,2]]), None])] >>> s.inputs.subject_info = info @@ -368,7 +368,7 @@ def _list_outputs(self): if not hasattr(self, '_sessinfo'): self._generate_design() outputs['session_info'] = self._sessinfo - + return outputs class SpecifySPMModelInputSpec(SpecifyModelInputSpec): @@ -517,7 +517,7 @@ class SpecifySparseModel(SpecifyModel): see Ghosh et al. 
(2009) OHBM 2009 http://dl.dropbox.com/u/363467/OHBM2009_HRF.pdf - + Examples -------- diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index 1b664b7760..a078418eb3 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -7,7 +7,7 @@ These functions include: * ArtifactDetect: performs artifact detection on functional images - + * StimulusCorrelation: determines correlation between stimuli schedule and movement/intensity parameters @@ -59,7 +59,7 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): translation_threshold = traits.Float(desc="Threshold (in mm) to use to detect translation-related outliers", mandatory=True, xor=['norm_threshold']) zintensity_threshold = traits.Float(desc="Intensity Z-threshold use to detection images that deviate from the" \ - "mean", mandatory=True) + "mean", mandatory=True) mask_type = traits.Enum('spm_global', 'file', 'thresh', desc="Type of mask that should be used to mask the functional data." \ "*spm_global* uses an spm_global like calculation to determine the" \ "brain mask. *file* specifies a brain mask file (should be an image" \ @@ -72,12 +72,12 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): "True)") save_plot = traits.Bool(True, desc="save plots containing outliers", usedefault=True) - + class ArtifactDetectOutputSpec(TraitedSpec): outlier_files = OutputMultiPath(File(exists=True),desc="One file for each functional run containing a list of 0-based" \ - "indices corresponding to outlier volumes") + "indices corresponding to outlier volumes") intensity_files = OutputMultiPath(File(exists=True),desc="One file for each functional run containing the global intensity" \ - "values determined from the brainmask") + "values determined from the brainmask") norm_files = OutputMultiPath(File, desc="One file for each functional run containing the composite norm") statistic_files = OutputMultiPath(File(exists=True),desc="One file for each functional run containing information about the" \ "different types of artifacts and if design info is provided then" \ @@ -94,7 +94,7 @@ class ArtifactDetect(BaseInterface): True, it computes the movement of the center of each face a cuboid centered around the head and returns the maximal movement across the centers. 
- + Examples -------- @@ -107,10 +107,10 @@ class ArtifactDetect(BaseInterface): >>> ad.inputs.zintensity_threshold = 3 >>> ad.run() # doctest: +SKIP """ - + input_spec = ArtifactDetectInputSpec output_spec = ArtifactDetectOutputSpec - + def _get_output_filenames(self,motionfile,output_dir): """Generate output files based on motion filenames @@ -120,7 +120,7 @@ def _get_output_filenames(self,motionfile,output_dir): motionfile: file/string Filename for motion parameter file output_dir: string - output directory in which the files will be generated + output directory in which the files will be generated """ if isinstance(motionfile,str): infile = motionfile @@ -136,7 +136,7 @@ def _get_output_filenames(self,motionfile,output_dir): normfile = os.path.join(output_dir,''.join(('norm.',filename,'.txt'))) plotfile = os.path.join(output_dir,''.join(('plot.',filename,'.png'))) return artifactfile,intensityfile,statsfile,normfile,plotfile - + def _list_outputs(self): outputs = self._outputs().get() outputs['outlier_files'] = [] @@ -149,7 +149,7 @@ def _list_outputs(self): for i,f in enumerate(filename_to_list(self.inputs.realigned_files)): outlierfile,intensityfile,statsfile,normfile,plotfile = self._get_output_filenames(f,os.getcwd()) outputs['outlier_files'].insert(i,outlierfile) - outputs['intensity_files'].insert(i,intensityfile) + outputs['intensity_files'].insert(i,intensityfile) outputs['statistic_files'].insert(i,statsfile) if isdefined(self.inputs.use_norm) and self.inputs.use_norm: outputs['norm_files'].insert(i,normfile) @@ -172,7 +172,7 @@ def _get_affine_matrix(self,params): params : np.array (upto 12 long) [translation (3), rotation (3,xyz, radians), scaling (3), shear/affine (3)] - + """ rotfunc = lambda x : np.array([[np.cos(x), np.sin(x)],[-np.sin(x),np.cos(x)]]) q = np.array([0,0,0,0,0,0,1,1,1,0,0,0]) @@ -197,7 +197,7 @@ def _get_affine_matrix(self,params): Sh[(0,0,1),(1,2,2)] = params[9:12] return np.dot(T,np.dot(Rx,np.dot(Ry,np.dot(Rz,np.dot(S,Sh))))) - + def _calc_norm(self,mc,use_differences): """Calculates the maximum overall displacement of the midpoints @@ -213,7 +213,7 @@ def _calc_norm(self,mc,use_differences): ------- norm : at each time point - + """ respos=np.diag([70,70,75]);resneg=np.diag([-70,-110,-45]); # respos=np.diag([50,50,50]);resneg=np.diag([-50,-50,-50]); @@ -271,7 +271,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): Exception("Siemens PACE format not implemented yet") else: Exception("Unknown source for movement parameters") - + if self.inputs.use_norm: # calculate the norm of the motion parameters normval = self._calc_norm(mc,self.inputs.use_differences[0]) @@ -309,7 +309,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): vol = data[:,:,:,t0] mask = mask*(vol>(self._nanmean(vol)/8)) for t0 in range(timepoints): - vol = data[:,:,:,t0] + vol = data[:,:,:,t0] g[t0] = self._nanmean(vol[mask]) if len(find_indices(mask))<(np.prod((x,y,z))/10): intersect_mask = False @@ -343,7 +343,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): outliers = np.unique(np.union1d(iidx,np.union1d(tidx,ridx))) artifactfile,intensityfile,statsfile,normfile,plotfile = self._get_output_filenames(imgfile,cwd) - + # write output to outputfile np.savetxt(artifactfile, outliers, fmt='%d', delimiter=' ') np.savetxt(intensityfile, g, fmt='%.2f', delimiter=' ') @@ -441,12 +441,12 @@ class StimulusCorrelation(BaseInterface): >>> sc.inputs.spm_mat_file = 'SPM.mat' >>> sc.inputs.concatenated_design = False >>> 
sc.run() # doctest: +SKIP - + """ input_spec = StimCorrInputSpec output_spec = StimCorrOutputSpec - + def _get_output_filenames(self, motionfile, output_dir): """Generate output files based on motion filenames @@ -455,7 +455,7 @@ def _get_output_filenames(self, motionfile, output_dir): motionfile: file/string Filename for motion parameter file output_dir: string - output directory in which the files will be generated + output directory in which the files will be generated """ (filepath,filename) = os.path.split(motionfile) (filename,ext) = os.path.splitext(filename) @@ -465,7 +465,7 @@ def _get_output_filenames(self, motionfile, output_dir): def _stimcorr_core(self,motionfile,intensityfile,designmatrix,cwd=None): """ Core routine for determining stimulus correlation - + """ if not cwd: cwd = os.getcwd() @@ -528,7 +528,7 @@ def _run_interface(self, runtime): self._stimcorr_core(motparamlist[i],intensityfiles[i], matrix, os.getcwd()) return runtime - + def _list_outputs(self): outputs = self._outputs().get() files = [] diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py index 366aad3bce..61971336ea 100644 --- a/nipype/algorithms/tests/test_modelgen.py +++ b/nipype/algorithms/tests/test_modelgen.py @@ -7,7 +7,7 @@ from nibabel import Nifti1Image import numpy as np -from nipype.testing import (assert_equal, assert_false, assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, assert_almost_equal) from nipype.interfaces.base import Bunch, TraitError from nipype.algorithms.modelgen import (SpecifyModel, SpecifySparseModel, diff --git a/nipype/algorithms/tests/test_rapidart.py b/nipype/algorithms/tests/test_rapidart.py index 4b645ba93c..dbc1ffde7c 100644 --- a/nipype/algorithms/tests/test_rapidart.py +++ b/nipype/algorithms/tests/test_rapidart.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from nipype.testing import (assert_equal, assert_false, assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, assert_almost_equal) import nipype.algorithms.rapidart as ra from nipype.interfaces.base import Bunch @@ -101,11 +101,11 @@ def test_sc_populate_inputs(): spm_mat_file=None, concatenated_design=None) yield assert_equal, sc.inputs.__dict__.keys(), inputs.__dict__.keys() - + def test_sc_output_filenames(): sc = ra.StimulusCorrelation() outputdir = '/tmp' f = 'motion.nii' corrfile = sc._get_output_filenames(f,outputdir) yield assert_equal, corrfile, '/tmp/qa.motion_stimcorr.txt' - + diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 1bee23d2f3..c632828ad3 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -364,7 +364,7 @@ class ThreedAllineateOutputSpec(AFNITraitedSpec): class ThreedAllineate(AFNICommand): """ - For complete details, see the `3dAllineate Documentation. + For complete details, see the `3dAllineate Documentation. 
`_ """ diff --git a/nipype/interfaces/afni/tests/test_preprocess.py b/nipype/interfaces/afni/tests/test_preprocess.py index a8513e7b14..1184feea04 100644 --- a/nipype/interfaces/afni/tests/test_preprocess.py +++ b/nipype/interfaces/afni/tests/test_preprocess.py @@ -258,7 +258,7 @@ def test_To3d(): td = dict(slice_order='tz', nt=150, nz=12, TR=2000, tpattern='alt+z') cmd.inputs.time_dependencies = td yield assert_equal, cmd.cmdline, 'to3d -time:tz 150 12 2000 alt+z' - + # time_dependencies provided as a tuple # slice_order, nz, nt, TR, tpattern td = ('zt', 12, 130, 2000, 'alt+z') @@ -290,7 +290,7 @@ def test_To3d(): tpattern='alt+z') yield assert_raises, KeyError, getattr, cmd, 'cmdline' # provide unknown parameters - cmd = afni.To3d(datatype='anat', foo='bar') + cmd = afni.To3d(datatype='anat', foo='bar') yield assert_raises, AttributeError, getattr, cmd, 'cmdline' # order of params cmd = afni.To3d(datatype='anat') @@ -459,7 +459,7 @@ def test_Threedvolreg(): cmd = afni.Threedvolreg() cmd.inputs.time_shift = 14 yield assert_equal, cmd.cmdline, '3dvolreg -tshift 14' - # basefile + # basefile cmd = afni.Threedvolreg() cmd.inputs.basefile = 5 yield assert_equal, cmd.cmdline, '3dvolreg -base 5' diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 37b1cbbdd7..0f644751b8 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -388,7 +388,7 @@ def get_traitsfree(self, **kwargs): out = super(BaseTraitedSpec, self).get(**kwargs) out = self._clean_container(out, skipundefined=True) return out - + def _clean_container(self, object, undefinedval=None, skipundefined=False): """Convert a traited obejct into a pure python representation. """ @@ -441,7 +441,7 @@ def get_hashval(self, hash_method=None): The md5 hash value of the traited spec """ - + dict_withhash = {} dict_nofilename = {} for name, val in sorted(self.get().items()): @@ -470,7 +470,7 @@ def _get_sorteddict(self, object, dictwithhash=False, hash_method=None, hash_fil if hash_files and isinstance(object, str) and os.path.isfile(object): if hash_method == None: hash_method = config.get('execution', 'hash_method') - + if hash_method.lower() == 'timestamp': hash = hash_timestamp(object) elif hash_method.lower() == 'content': @@ -779,14 +779,14 @@ def run(self, **inputs): except Exception, e: if len(e.args) == 0: e.args = ("") - + message = "\nInterface %s failed to run."%self.__class__.__name__ - + if config.has_option('logging', 'interface_level') and config.get('logging', 'interface_level').lower() == 'debug': inputs_str = "Inputs:" + str(self.inputs) + "\n" else: inputs_str = '' - + if len(e.args) == 1 and isinstance(e.args[0], str): e.args = (e.args[0] + " ".join([message, inputs_str]),) else: @@ -1158,13 +1158,13 @@ def _parse_inputs(self, skip=None): first_args = [arg for pos, arg in sorted(initial_args.items())] last_args = [arg for pos, arg in sorted(final_args.items())] return first_args + all_args + last_args - + class StdOutCommandLineInputSpec(CommandLineInputSpec): out_file = File(argstr="> %s", position=-1, genfile=True) - + class StdOutCommandLine(CommandLine): input_spec = StdOutCommandLineInputSpec - + def _gen_filename(self, name): if name is 'out_file': return self._gen_outfilename() diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 79cd44a248..4307c3859c 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -1,4 +1,4 @@ -""" +""" Change directory to provide relative paths for doctests 
>>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index 47bca95ada..8a8ead58ce 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -1,4 +1,4 @@ -""" +""" Change directory to provide relative paths for doctests >>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 1d3461a460..aab888e72b 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -1,4 +1,4 @@ -""" +""" Change directory to provide relative paths for doctests >>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) @@ -41,10 +41,10 @@ class DTIFit(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon - >>> fit = cmon.DTIFit() - >>> fit.inputs.scheme_file = 'A.scheme' - >>> fit.inputs.in_file = 'tensor_fitted_data.Bfloat' + >>> import nipype.interfaces.camino as cmon + >>> fit = cmon.DTIFit() + >>> fit.inputs.scheme_file = 'A.scheme' + >>> fit.inputs.in_file = 'tensor_fitted_data.Bfloat' >>> fit.run() # doctest: +SKIP """ _cmd = 'dtfit' @@ -125,11 +125,11 @@ class ModelFit(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon - >>> fit = cmon.ModelFit() - >>> fit.model = 'dt' - >>> fit.inputs.scheme_file = 'A.scheme' - >>> fit.inputs.in_file = 'tensor_fitted_data.Bfloat' + >>> import nipype.interfaces.camino as cmon + >>> fit = cmon.ModelFit() + >>> fit.model = 'dt' + >>> fit.inputs.scheme_file = 'A.scheme' + >>> fit.inputs.in_file = 'tensor_fitted_data.Bfloat' >>> fit.run() # doctest: +SKIP """ _cmd = 'modelfit' diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index 7f099e9a57..e5cc826702 100755 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -1,4 +1,4 @@ -""" +""" Change directory to provide relative paths for doctests >>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index 61b8839802..6984bdfba0 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -1,4 +1,4 @@ -""" +""" Change directory to provide relative paths for doctests >>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py index 779dd84c7b..a56c38620e 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -16,7 +16,7 @@ class Dcm2niiInputSpec(CommandLineInputSpec): reorient_and_crop = traits.Bool(argstr='-x', position=5) output_dir = Directory(exists=True, argstr='-o %s', genfile=True, position=6) config_file = File(exists=True, argstr="-b %s", genfile=True, position=7) - + class Dcm2niiOutputSpec(TraitedSpec): converted_files = OutputMultiPath(File(exists=True)) reoriented_files = OutputMultiPath(File(exists=True)) @@ -27,9 +27,9 @@ class Dcm2niiOutputSpec(TraitedSpec): class Dcm2nii(CommandLine): input_spec=Dcm2niiInputSpec output_spec=Dcm2niiOutputSpec - + _cmd = 'dcm2nii' - + def _format_arg(self, opt, spec, val): if opt in ['gzip_output', 'nii_output', 'anonymize', 'id_in_filename']: spec = deepcopy(spec) @@ -39,16 +39,16 @@ def _format_arg(self, opt, spec, val): spec.argstr += ' n' val = True return super(Dcm2nii, 
self)._format_arg(opt, spec, val) - + def _run_interface(self, runtime): - + new_runtime = super(Dcm2nii, self)._run_interface(runtime) - (self.output_files, - self.reoriented_files, - self.reoriented_and_cropped_files, + (self.output_files, + self.reoriented_files, + self.reoriented_and_cropped_files, self.bvecs, self.bvals) = self._parse_stdout(new_runtime.stdout) return new_runtime - + def _parse_stdout(self, stdout): files = [] reoriented_files = [] @@ -68,19 +68,19 @@ def _parse_stdout(self, stdout): output_dir = self.inputs.output_dir else: output_dir = self._gen_filename('output_dir') - file = os.path.abspath(os.path.join(output_dir, + file = os.path.abspath(os.path.join(output_dir, line[len("GZip..."):])) elif line.startswith("Number of diffusion directions "): if last_added_file: base, filename, ext = split_filename(last_added_file) bvecs.append(os.path.join(base,filename + ".bvec")) bvals.append(os.path.join(base,filename + ".bval")) - + if file: files.append(file) last_added_file = file continue - + if line.startswith("Reorienting as "): reoriented_files.append(line[len("Reorienting as "):]) skip = True @@ -90,13 +90,13 @@ def _parse_stdout(self, stdout): filename = "c" + filename reoriented_and_cropped_files.append(os.path.join(base, filename)) skip = True - continue - - - + continue + + + skip = False return files, reoriented_files, reoriented_and_cropped_files, bvecs, bvals - + def _list_outputs(self): outputs = self.output_spec().get() outputs['converted_files'] = self.output_files @@ -105,7 +105,7 @@ def _list_outputs(self): outputs['bvecs'] = self.bvecs outputs['bvals'] = self.bvals return outputs - + def _gen_filename(self, name): if name == 'output_dir': return os.getcwd() @@ -117,5 +117,4 @@ def _gen_filename(self, name): f.close() return config_file return None - - \ No newline at end of file + diff --git a/nipype/interfaces/diffusion_toolkit/__init__.py b/nipype/interfaces/diffusion_toolkit/__init__.py index 0f2f2e7afe..14151c6ff3 100644 --- a/nipype/interfaces/diffusion_toolkit/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/__init__.py @@ -1,4 +1,4 @@ from nipype.interfaces.diffusion_toolkit.base import Info from nipype.interfaces.diffusion_toolkit.postproc import SplineFilter from nipype.interfaces.diffusion_toolkit.dti import (DTIRecon, DTITracker) -from nipype.interfaces.diffusion_toolkit.odf import (HARDIMat, ODFRecon, ODFTracker) +from nipype.interfaces.diffusion_toolkit.odf import (HARDIMat, ODFRecon, ODFTracker) diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index 829b7ce914..acb437045a 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -6,7 +6,7 @@ Currently these tools are supported: * TODO - + Examples -------- See the docstrings for the individual classes for 'working' examples. 
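Dcm2nii's outputs are recovered by scraping its stdout, as the _parse_stdout method above shows: each conversion step is announced with a fixed line prefix ("GZip...", "Number of diffusion directions ", "Reorienting as "). A minimal sketch of the same idea, handling only the first two prefixes; the helper name and the sample transcript are invented for illustration and sidestep nipype's split_filename handling of double extensions.

    # Hedged sketch of prefix-based stdout scraping, after Dcm2nii._parse_stdout.
    import os

    def parse_dcm2nii_stdout(stdout, output_dir):
        converted, bvecs, bvals = [], [], []
        last = None
        for line in stdout.split("\n"):
            if line.startswith("GZip..."):
                last = os.path.abspath(os.path.join(output_dir,
                                                    line[len("GZip..."):]))
                converted.append(last)
            elif line.startswith("Number of diffusion directions ") and last:
                base, _ = os.path.splitext(last)  # strips only the final ".gz"
                bvecs.append(base + ".bvec")
                bvals.append(base + ".bval")
        return converted, bvecs, bvals

    out = "GZip...dti.nii.gz\nNumber of diffusion directions 64\n"
    files, bvecs, bvals = parse_dcm2nii_stdout(out, "/tmp")
    # files == ['/tmp/dti.nii.gz'], bvecs == ['/tmp/dti.nii.bvec']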
@@ -25,9 +25,9 @@ class Info(object): >>> from nipype.interfaces.diffusion_toolkit import Info >>> Info.version() # doctest: +SKIP >>> Info.subjectsdir() # doctest: +SKIP - + """ - + @staticmethod def version(): """Check for dtk version on system diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index ff731e9a86..25aeddb40a 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -20,7 +20,7 @@ class DTIReconInputSpec(CommandLineInputSpec): DWI = File(desc='Input diffusion volume', argstr='%s',exists=True, mandatory=True,position=1) out_prefix = traits.Str("dti", desc='Output file prefix', argstr='%s', usedefault=True,position=2) - output_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', argstr='-ot %s', desc='output file type', usedefault=True) + output_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', argstr='-ot %s', desc='output file type', usedefault=True) bvecs = File(exists=True, desc = 'b vectors file', argstr='-gm %s', mandatory=True) bvals = File(exists=True,desc = 'b values file', mandatory=True) @@ -38,8 +38,8 @@ class DTIReconInputSpec(CommandLineInputSpec): b0_threshold = traits.Float(desc="""program will use b0 image with the given threshold to mask out high background of fa/adc maps. by default it will calculate threshold automatically. but if it failed, you need to set it manually.""", argstr="-b0_th") - - + + class DTIReconOutputSpec(TraitedSpec): ADC = File(exists=True) B0 = File(exists=True) @@ -57,12 +57,12 @@ class DTIReconOutputSpec(TraitedSpec): class DTIRecon(CommandLine): """Use dti_recon to generate tensors and other maps """ - + input_spec=DTIReconInputSpec output_spec=DTIReconOutputSpec - + _cmd = 'dti_recon' - + def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = 'gradient_matrix.txt' bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] @@ -76,7 +76,7 @@ def _create_gradient_matrix(self, bvecs_file, bvals_file): gradient_matrix_f.write("%s, %s, %s, %s\n"%(bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) gradient_matrix_f.close() return _gradient_matrix_file - + def _format_arg(self, name, spec, value): if name == "bvecs": new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) @@ -102,7 +102,7 @@ def _list_outputs(self): outputs['V3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v3.'+ output_type)) return outputs - + class DTITrackerInputSpec(CommandLineInputSpec): tensor_file = File(exists=True, desc="reconstructed tensor file") input_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', desc="""input and output file type. 
accepted values are: @@ -143,23 +143,23 @@ class DTITrackerInputSpec(CommandLineInputSpec): class DTITrackerOutputSpec(TraitedSpec): track_file = File(exists=True) mask_file = File(exists=True) - + class DTITracker(CommandLine): input_spec=DTITrackerInputSpec output_spec=DTITrackerOutputSpec - + _cmd = 'dti_tracker' - + def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.tensor_file) copyfile(self.inputs.tensor_file, os.path.abspath(self.inputs.input_data_prefix + "_tensor" + ext), copy=False) - + return super(DTITracker, self)._run_interface(runtime) - + def _list_outputs(self): outputs = self.output_spec().get() outputs['track_file'] = os.path.abspath(self.inputs.output_file) if isdefined(self.inputs.output_mask) and self.inputs.output_mask: outputs['mask_file'] = os.path.abspath(self.inputs.output_mask) - + return outputs \ No newline at end of file diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 25a331dd3c..4559850ade 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -20,7 +20,7 @@ class HARDIMatInputSpec(CommandLineInputSpec): bvecs = File(exists=True, desc = 'b vectors file', argstr='%s', position=1, mandatory=True) - bvals = File(exists=True,desc = 'b values file', mandatory=True) + bvals = File(exists=True,desc = 'b values file', mandatory=True) out_file = File("recon_mat.dat", desc = 'output matrix file', argstr='%s', usedefault=True, position=2) order = traits.Int(argsstr='-order %s', desc="""maximum order of spherical harmonics. must be even number. default is 4""") @@ -30,7 +30,7 @@ class HARDIMatInputSpec(CommandLineInputSpec): figure out the image orientation information. if no such info was found in the given image header, the next 5 options -info, etc., will be used if provided. if image orientation info can be found - in the given reference, all other 5 image orientation options will + in the given reference, all other 5 image orientation options will be IGNORED""") image_info = File(exists=True, argstr='-info %s', desc="""specify image information file. the image info file is generated from original dicom image by diff_unpack program and contains image @@ -42,23 +42,23 @@ class HARDIMatInputSpec(CommandLineInputSpec): numbers and construct the 1st and 2nd vector and calculate the 3rd one automatically. 
this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f") + as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f") oblique_correction = traits.Bool(desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", argstr="-oc") class HARDIMatOutputSpec(TraitedSpec): out_file = File(exists=True, desc='output matrix file') - + class HARDIMat(CommandLine): """Use hardi_mat to calculate a reconstruction matrix from a gradient table """ input_spec=HARDIMatInputSpec output_spec=HARDIMatOutputSpec - + _cmd = 'hardi_mat' - + def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = 'gradient_matrix.txt' bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] @@ -74,7 +74,7 @@ def _create_gradient_matrix(self, bvecs_file, bvals_file): gradient_matrix_f.write("%s %s %s\n"%(bvecs_x[i], bvecs_y[i], bvecs_z[i])) gradient_matrix_f.close() return _gradient_matrix_file - + def _format_arg(self, name, spec, value): if name == "bvecs": new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) @@ -96,7 +96,7 @@ class ODFReconInputSpec(CommandLineInputSpec): from the number of directions and number of volumes in the raw data. useful when dealing with incomplete raw data set or only using part of raw data set to reconstruct""", mandatory=True) - output_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', argstr='-ot %s', desc='output file type', usedefault=True) + output_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', argstr='-ot %s', desc='output file type', usedefault=True) sharpness = traits.Float(desc="""smooth or sharpen the raw data. factor > 0 is smoothing. factor < 0 is sharpening. default value is 0 NOTE: this option applies to DSI study only""", argstr='-s %f') @@ -114,8 +114,8 @@ class ODFReconInputSpec(CommandLineInputSpec): oblique_correction = traits.Bool(desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", argstr="-oc") - - + + class ODFReconOutputSpec(TraitedSpec): B0 = File(exists=True) DWI = File(exists=True) @@ -126,12 +126,12 @@ class ODFReconOutputSpec(TraitedSpec): class ODFRecon(CommandLine): """Use odf_recon to generate tensors and other maps """ - + input_spec=ODFReconInputSpec output_spec=ODFReconOutputSpec - + _cmd = 'odf_recon' - + def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type @@ -143,7 +143,7 @@ def _list_outputs(self): outputs['ODF'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_odf.'+ output_type)) if isdefined(self.inputs.output_entropy): outputs['entropy'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_entropy.'+ output_type)) - + return outputs class ODFTrackerInputSpec(CommandLineInputSpec): @@ -197,31 +197,31 @@ class ODFTrackerInputSpec(CommandLineInputSpec): sagittal image is PIL. 
this information also is NOT needed for tracking but will be saved in the track file and is essential for track display to map onto - the right coordinates""") - + the right coordinates""") + class ODFTrackerOutputSpec(TraitedSpec): track_file = File(exists=True, desc='output track file') class ODFTracker(CommandLine): """Use odf_tracker to generate track file """ - + input_spec=ODFTrackerInputSpec output_spec=ODFTrackerOutputSpec - + _cmd = 'odf_tracker' def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.max) copyfile(self.inputs.max, os.path.abspath(self.inputs.input_data_prefix + "_max" + ext), copy=False) - + _, _, ext = split_filename(self.inputs.ODF) copyfile(self.inputs.ODF, os.path.abspath(self.inputs.input_data_prefix + "_odf" + ext), copy=False) - + return super(ODFTracker, self)._run_interface(runtime) - + def _list_outputs(self): outputs = self.output_spec().get() outputs['track_file'] = os.path.abspath(self.inputs.out_file) return outputs - + diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 20633ccf94..a2cd70ad7a 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -14,21 +14,21 @@ from nipype.interfaces.base import (TraitedSpec, File, traits, CommandLine, CommandLineInputSpec) - + class SplineFilterInputSpec(CommandLineInputSpec): track_file = File(exists=True, desc="file containing tracks to be filtered", position=0, argstr="%s", mandatory=True) step_length = traits.Float(desc="in the unit of minimum voxel size", position=1, argstr="%f", mandatory=True) output_file = File("spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", usedefault=True) - + class SplineFilterOutputSpec(TraitedSpec): smoothed_track_file = File(exists=True) - + class SplineFilter(CommandLine): input_spec=SplineFilterInputSpec output_spec=SplineFilterOutputSpec - + _cmd = "spline_filter" - + def _list_outputs(self): outputs = self.output_spec().get() outputs['smoothed_track_file'] = os.path.abspath(self.inputs.output_file) diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index 356e1a4a96..0a3d8fe350 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -7,7 +7,7 @@ * Dicom2Nifti: using mri_convert * Resample: using mri_convert - + Examples -------- See the docstrings for the individual classes for 'working' examples. 
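Given the spec above, SplineFilter assembles its command line from three positional arguments: the input track file, the step length (in units of the minimum voxel size), and the smoothed output name, which defaults to spline_tracks.trk. A doctest-style sketch under those assumptions; 'tracks.trk' is a hypothetical file and must already exist, since track_file is declared with exists=True.

    >>> from nipype.interfaces.diffusion_toolkit import SplineFilter
    >>> filt = SplineFilter(track_file='tracks.trk', step_length=0.5) # doctest: +SKIP
    >>> filt.cmdline # doctest: +SKIP
    'spline_filter tracks.trk 0.500000 spline_tracks.trk'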
@@ -33,23 +33,23 @@ class Info(object): >>> from nipype.interfaces.freesurfer import Info >>> Info.version() # doctest: +SKIP >>> Info.subjectsdir() # doctest: +SKIP - + """ - + @staticmethod def version(): """Check for freesurfer version on system - + Find which freesurfer is being used....and get version from /path/to/freesurfer/build-stamp.txt - + Returns ------- - + version : string - version number as string + version number as string or None if freesurfer version not found - + """ fs_home = os.getenv('FREESURFER_HOME') if fs_home is None: @@ -61,23 +61,23 @@ def version(): version = fid.readline() fid.close() return version - + @classmethod def subjectsdir(cls): """Check the global SUBJECTS_DIR - + Parameters ---------- - + subjects_dir : string The system defined subjects directory - + Returns ------- - + subject_dir : string Represents the current environment setting of SUBJECTS_DIR - + """ if cls.version(): return os.environ['SUBJECTS_DIR'] @@ -86,15 +86,15 @@ def subjectsdir(cls): class FSTraitedSpec(CommandLineInputSpec): subjects_dir = Directory(exists=True, desc='subjects directory') - + class FSCommand(CommandLine): """General support for FreeSurfer commands. Every FS command accepts 'subjects_dir' input. """ - + input_spec = FSTraitedSpec - + _subjects_dir = None def __init__(self, **inputs): diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index d45dcb5287..631e3a59f8 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -54,7 +54,7 @@ class MRISPreprocInputSpec(FSTraitedSpec): desc='projection fraction for vol2surf') fwhm = traits.Float(argstr='--fwhm %f', xor=['num_iters'], - desc='smooth by fwhm mm on the target surface') + desc='smooth by fwhm mm on the target surface') num_iters = traits.Int(argstr='--niters %d', xor=['fwhm'], desc='niters : smooth by niters on the target surface') @@ -69,14 +69,14 @@ class MRISPreprocInputSpec(FSTraitedSpec): class MRISPreprocOutputSpec(TraitedSpec): out_file = File(exists=True, desc='preprocessed output file') - + class MRISPreproc(FSCommand): """Use FreeSurfer mris_preproc to prepare a group of contrasts for a second level analysis - + Examples -------- - + >>> preproc = MRISPreproc() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' @@ -100,11 +100,11 @@ def _list_outputs(self): 'concat_%s_%s.mgz'%(self.inputs.hemi, self.inputs.target)) return outputs - + def _gen_filename(self, name): if name == 'out_file': return self._list_outputs()[name] - return None + return None class GLMFitInputSpec(FSTraitedSpec): glm_dir = traits.Str(argstr='--glmdir %s', desc='save outputs to dir', @@ -119,7 +119,7 @@ class GLMFitInputSpec(FSTraitedSpec): desc='design matrix file') contrast = InputMultiPath(File(exists=True), argstr='--C %s...', desc='contrast file') - + one_sample = traits.Bool(argstr='--osgm', xor=('one_sample', 'fsgd', 'design', 'contrast'), desc='construct X and C as a one-sample group mean') @@ -131,7 +131,7 @@ class GLMFitInputSpec(FSTraitedSpec): argstr='--selfreg %d %d %d', desc='self-regressor from index col row slice') weighted_ls = File(exists=True, argstr='--wls %s', - xor = ('weight_file', 'weight_inv', 'weight_sqrt'), + xor = ('weight_file', 'weight_inv', 'weight_sqrt'), desc='weighted least squares') fixed_fx_var = File(exists=True, argstr='--yffxvar %s', desc='for fixed effects analysis') @@ -242,7 +242,7 @@ class GLMFitOutputSpec(TraitedSpec): frame_eigenvectors = File(desc="matrix of frame eigenvectors from 
residual PCA") singular_values = File(desc="matrix singular values from residual PCA") svd_stats_file = File(desc="text file summarizing the residual PCA") - + class GLMFit(FSCommand): """Use FreeSurfer's mri_glmfit to specify and estimate a general linear model. @@ -254,7 +254,7 @@ class GLMFit(FSCommand): >>> glmfit.inputs.one_sample = True >>> glmfit.cmdline == 'mri_glmfit --glmdir %s --y functional.nii --osgm'%os.getcwd() True - + """ _cmd = 'mri_glmfit' @@ -320,14 +320,14 @@ def _list_outputs(self): def _gen_filename(self, name): if name == 'glm_dir': return os.getcwd() - return None - + return None + class OneSampleTTest(GLMFit): def __init__(self, **kwargs): super(OneSampleTTest, self).__init__(**kwargs) self.inputs.one_sample = True - + class BinarizeInputSpec(FSTraitedSpec): in_file = File(exists=True, argstr='--i %s', mandatory=True, @@ -364,7 +364,7 @@ class BinarizeInputSpec(FSTraitedSpec): frame_no = traits.Int(argstr='--frame %s', desc='use 0-based frame of input (default is 0)') merge_file = File(exists=True, argstr='--merge %s', - desc='merge with mergevol') + desc='merge with mergevol') mask_file = File(exists=True, argstr='--mask maskvol', desc='must be within mask') mask_thresh = traits.Float(argstr='--mask-thresh %f', @@ -387,17 +387,17 @@ class BinarizeInputSpec(FSTraitedSpec): class BinarizeOutputSpec(TraitedSpec): binary_file = File(exists=True, desc='binarized output volume') count_file = File(desc='ascii file containing number of hits') - + class Binarize(FSCommand): """Use FreeSurfer mri_binarize to threshold an input volume Examples -------- - + >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii') >>> binvol.cmdline 'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000' - + """ _cmd = 'mri_binarize' @@ -441,11 +441,11 @@ def _format_arg(self, name, spec, value): if name == 'out_type': return '' return super(Binarize, self)._format_arg(name, spec, value) - + def _gen_filename(self, name): if name == 'binary_file': return self._list_outputs()[name] - return None + return None class ConcatenateInputSpec(FSTraitedSpec): @@ -519,11 +519,11 @@ def _list_outputs(self): else: outputs['concatenated_file'] = self.inputs.concatenated_file return outputs - + def _gen_filename(self, name): if name == 'concatenated_file': return self._list_outputs()[name] - return None + return None class SegStatsInputSpec(FSTraitedSpec): _xor_inputs = ('segmentation_file', 'annot', 'surf_label') @@ -600,7 +600,7 @@ class SegStats(FSCommand): Examples -------- - + >>> import nipype.interfaces.freesurfer as fs >>> ss = fs.SegStats() >>> ss.inputs.annot = ('PWS04', 'lh', 'aparc') @@ -610,7 +610,7 @@ class SegStats(FSCommand): >>> ss.inputs.summary_file = './summary.stats' >>> ss.cmdline 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --i functional.nii --sum ./summary.stats' - + """ _cmd = 'mri_segstats' @@ -649,11 +649,11 @@ def _format_arg(self, name, spec, value): fname = value return spec.argstr % fname return super(SegStats, self)._format_arg(name, spec, value) - + def _gen_filename(self, name): if name == 'summary_file': return self._list_outputs()[name] - return None + return None class Label2VolInputSpec(FSTraitedSpec): @@ -722,11 +722,11 @@ class Label2Vol(FSCommand): Examples -------- - + >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii') >>> binvol.cmdline 'mri_label2vol --fillthresh 0 --label cortex.label --reg register.dat --temp 
structural.nii --o foo_out.nii' - + """ _cmd = 'mri_label2vol' diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index c450a0da5a..97bd62404e 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -42,7 +42,7 @@ class ParseDICOMDirOutputSpec(TraitedSpec): class ParseDICOMDir(FSCommand): """Uses mri_parse_sdcmdir to get information from dicom directories - + Examples -------- @@ -53,13 +53,13 @@ class ParseDICOMDir(FSCommand): >>> dcminfo.inputs.summarize = True >>> dcminfo.cmdline 'mri_parse_sdcmdir --d . --o dicominfo.txt --sortbyrun --summarize' - + """ _cmd = 'mri_parse_sdcmdir' input_spec = ParseDICOMDirInputSpec output_spec = ParseDICOMDirOutputSpec - + def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.dicom_info_file): @@ -251,7 +251,7 @@ class MRIConvertInputSpec(FSTraitedSpec): position=-2, argstr='--input_volume %s', desc='File to read/convert') - out_file = File(argstr='--output_volume %s', + out_file = File(argstr='--output_volume %s', position=-1, genfile=True, desc='output filename or True to generate one') conform = traits.Bool(argstr='--conform', @@ -327,7 +327,7 @@ class MRIConvert(FSCommand): >>> mc.inputs.out_type = 'mgz' >>> mc.cmdline 'mri_convert --out_type mgz --input_volume structural.nii --output_volume outfile.mgz' - + """ _cmd = 'mri_convert' input_spec = MRIConvertInputSpec @@ -344,7 +344,7 @@ def _format_arg(self, name, spec, value): if value == 'niigz': return spec.argstr % 'nii' return super(MRIConvert, self)._format_arg(name, spec, value) - + def _get_outfilename(self): outfile = self.inputs.out_file if not isdefined(outfile): @@ -357,7 +357,7 @@ def _get_outfilename(self): suffix=suffix, use_ext=False) return outfile - + def _list_outputs(self): outputs = self.output_spec().get() outfile = self._get_outfilename() @@ -401,7 +401,7 @@ def _list_outputs(self): def _gen_filename(self, name): if name == 'out_file': return self._get_outfilename() - return None + return None class DICOMConvertInputSpec(FSTraitedSpec): dicom_dir = Directory(exists=True, mandatory=True, @@ -530,17 +530,17 @@ class ResampleInputSpec(FSTraitedSpec): voxel_size = traits.Tuple(traits.Float, traits.Float, traits.Float, argstr='-vs %.2f %.2f %.2f', desc='triplet of output voxel sizes', mandatory=True) - + class ResampleOutputSpec(TraitedSpec): resampled_file = File(exists=True, desc='output filename') - + class Resample(FSCommand): """Use FreeSurfer mri_convert to up or down-sample image files Examples -------- - + >>> from nipype.interfaces import freesurfer >>> resampler = freesurfer.Resample() >>> resampler.inputs.in_file = 'structural.nii' @@ -548,7 +548,7 @@ class Resample(FSCommand): >>> resampler.inputs.voxel_size = (2.1, 2.1, 2.1) >>> resampler.cmdline 'mri_convert -vs 2.10 2.10 2.10 -i structural.nii -o resampled.nii' - + """ _cmd = 'mri_convert' @@ -563,13 +563,13 @@ def _get_outfilename(self): newpath = os.getcwd(), suffix='_resample') return outfile - - + + def _list_outputs(self): outputs = self.output_spec().get() outputs['resampled_file'] = self._get_outfilename() return outputs - + def _gen_filename(self, name): if name == 'resampled_file': return self._get_outfilename() @@ -580,7 +580,7 @@ class ReconAllInputSpec(CommandLineInputSpec): usedefault=True) directive = traits.Enum('all', 'autorecon1', 'autorecon2', 'autorecon2-cp', 'autorecon2-wm', 'autorecon2-inflate1', 'autorecon2-perhemi', - 'autorecon3', 'localGI', 'qcache', 
argstr='-%s', + 'autorecon3', 'localGI', 'qcache', argstr='-%s', desc='process directive', usedefault=True) hemi = traits.Enum('lh', 'rh', desc='hemisphere to process', argstr="-hemi %s") T1_files = InputMultiPath(File(exists=True), argstr='-i %s...', @@ -588,18 +588,18 @@ class ReconAllInputSpec(CommandLineInputSpec): subjects_dir = Directory(exists=True, argstr='-sd %s', desc='path to subjects directory', genfile=True) flags = traits.Str(argstr='%s', desc='additional parameters') - + class ReconAllIOutputSpec(FreeSurferSource.output_spec): subjects_dir = Directory(exists=True, desc='Freesurfer subjects directory.') subject_id = traits.Str(desc='Subject name for whom to retrieve data') class ReconAll(CommandLine): """Uses recon-all to generate surfaces and parcellations of structural data - from anatomical images of a subject. + from anatomical images of a subject. Examples -------- - + >>> from nipype.interfaces.freesurfer import ReconAll >>> reconall = ReconAll() >>> reconall.inputs.subject_id = 'foo' @@ -608,13 +608,13 @@ class ReconAll(CommandLine): >>> reconall.inputs.T1_files = 'structural.nii' >>> reconall.cmdline 'recon-all -i structural.nii -all -subjid foo -sd .' - + """ _cmd = 'recon-all' input_spec = ReconAllInputSpec output_spec = ReconAllIOutputSpec - + def _gen_subjects_dir(self): return os.getcwd() @@ -631,14 +631,14 @@ def _list_outputs(self): subjects_dir = self.inputs.subjects_dir else: subjects_dir = self._gen_subjects_dir() - + if isdefined(self.inputs.hemi): hemi = self.inputs.hemi else: hemi = 'both' - + outputs = self._outputs().get() - + outputs.update(FreeSurferSource(subject_id=self.inputs.subject_id, subjects_dir=subjects_dir, hemi=hemi)._list_outputs()) outputs['subject_id'] = self.inputs.subject_id @@ -662,7 +662,7 @@ class BBRegisterInputSpec(FSTraitedSpec): genfile=True) spm_nifti = traits.Bool(argstr="--spm-nii", desc="force use of nifti rather than analyze with SPM") - epi_mask = traits.Bool(argstr="--epi-mask",desc="mask out B0 regions in stages 1 and 2") + epi_mask = traits.Bool(argstr="--epi-mask",desc="mask out B0 regions in stages 1 and 2") out_fsl_file = traits.Either(traits.Bool, File, argstr="--fslmat %s", desc="write the transformation matrix in FSL FLIRT format") registered_file = traits.Either(traits.Bool, File, argstr='--o %s', @@ -684,7 +684,7 @@ class BBRegister(FSCommand): Examples -------- - + >>> from nipype.interfaces.freesurfer import BBRegister >>> bbreg = BBRegister(subject_id='me', source_file='structural.nii', init='header', contrast_type='t2') >>> bbreg.cmdline @@ -695,7 +695,7 @@ class BBRegister(FSCommand): _cmd = 'bbregister' input_spec = BBRegisterInputSpec output_spec = BBRegisterOutputSpec - + def _list_outputs(self): outputs = self.output_spec().get() outputs['out_reg_file'] = self.inputs.out_reg_file @@ -724,11 +724,11 @@ def _format_arg(self, name, spec, value): fname = value return spec.argstr % fname return super(BBRegister, self)._format_arg(name, spec, value) - + def _gen_filename(self, name): if name == 'out_reg_file': return self._list_outputs()[name] - return None + return None class ApplyVolTransformInputSpec(FSTraitedSpec): source_file = File(exists = True, argstr = '--mov %s', @@ -774,7 +774,7 @@ class ApplyVolTransform(FSCommand): Examples -------- - + >>> from nipype.interfaces.freesurfer import ApplyVolTransform >>> applyreg = ApplyVolTransform() >>> applyreg.inputs.source_file = 'structural.nii' @@ -809,11 +809,11 @@ def _list_outputs(self): outputs = self.output_spec().get() outputs['transformed_file'] = 
self._get_outfile() return outputs - + def _gen_filename(self, name): if name == 'transformed_file': return self._get_outfile() - return None + return None class SmoothInputSpec(FSTraitedSpec): in_file= File(exists=True, desc='source volume', @@ -839,8 +839,8 @@ class SmoothInputSpec(FSTraitedSpec): desc='volumesmoothing outside of surface') class SmoothOutputSpec(TraitedSpec): - smoothed_file= File(exist=True,desc='smoothed input volume') - + smoothed_file= File(exist=True,desc='smoothed input volume') + class Smooth(FSCommand): """Use FreeSurfer mris_volsmooth to smooth a volume @@ -860,7 +860,7 @@ class Smooth(FSCommand): >>> smoothvol = Smooth(in_file='functional.nii', smoothed_file = 'foo_out.nii', reg_file='register.dat', surface_fwhm=10, vol_fwhm=6) >>> smoothvol.cmdline 'mris_volsmooth --i functional.nii --reg register.dat --o foo_out.nii --fwhm 10 --vol-fwhm 6' - + """ _cmd = 'mris_volsmooth' @@ -918,7 +918,7 @@ class RobustRegisterInputSpec(FSTraitedSpec): no_init = traits.Bool(argstr='--noinit',desc='skip transform init') init_orient = traits.Bool(argstr='--initorient', desc='use moments for initial orient (recommended for stripped brains)') - max_iterations = traits.Int(argstr='--maxit %d', + max_iterations = traits.Int(argstr='--maxit %d', desc='maximum # of times on each resolution') high_iterations = traits.Int(argstr='--highit %d', desc='max # of times on highest resolution') @@ -974,7 +974,7 @@ class RobustRegister(FSCommand): _cmd = 'mri_robust_register' input_spec = RobustRegisterInputSpec output_spec = RobustRegisterOutputSpec - + def _format_arg(self, name, spec, value): for option in ["registered_file", "weights_file", "half_source", "half_targ", "half_weights", "half_source_xfm", "half_targ_xfm"]: @@ -1016,7 +1016,7 @@ def _list_outputs(self): def _gen_filename(self, name): if name == 'out_reg_file': return self._list_outputs()[name] - return None + return None class FitMSParamsInputSpec(FSTraitedSpec): @@ -1025,7 +1025,7 @@ class FitMSParamsInputSpec(FSTraitedSpec): tr_list = traits.List(traits.Int, desc="list of TRs of the input files (in msec)") te_list = traits.List(traits.Float, desc="list of TEs of the input files (in msec)") flip_list = traits.List(traits.Int, desc="list of flip angles of the input files") - xfm_list = traits.List(File, exists=True, + xfm_list = traits.List(File, exists=True, desc="list of transform files to apply to each FLASH image") out_dir = Directory(argstr="%s",position=-1, genfile=True, desc="directory to store output in") diff --git a/nipype/interfaces/freesurfer/tests/test_model.py b/nipype/interfaces/freesurfer/tests/test_model.py index 9f533ad92e..f105eebdf6 100644 --- a/nipype/interfaces/freesurfer/tests/test_model.py +++ b/nipype/interfaces/freesurfer/tests/test_model.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from nipype.testing import (assert_equal, assert_false, assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, skipif) import nipype.interfaces.freesurfer as freesurfer diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index 373879d379..78b85daac0 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -5,7 +5,7 @@ import nibabel as nif import numpy as np from tempfile import mkdtemp -from nipype.testing import (assert_equal, assert_false, 
assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, skipif) import nipype.interfaces.freesurfer as freesurfer @@ -14,7 +14,7 @@ def no_freesurfer(): return True else: return False - + @skipif(no_freesurfer) def test_applyvoltransform(): input_map = dict(args = dict(argstr='%s',), @@ -78,7 +78,7 @@ def test_dicomconvert(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value -@skipif(no_freesurfer) +@skipif(no_freesurfer) def test_mriconvert(): input_map = dict(apply_inv_transform = dict(argstr='--apply_inverse_transform %s',), apply_transform = dict(argstr='--apply_transform %s',), @@ -167,7 +167,7 @@ def test_mriconvert(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value -@skipif(no_freesurfer) +@skipif(no_freesurfer) def test_parsedicomdir(): input_map = dict(args = dict(argstr='%s',), dicom_dir = dict(mandatory=True,argstr='--d %s',), @@ -181,7 +181,7 @@ def test_parsedicomdir(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value -@skipif(no_freesurfer) +@skipif(no_freesurfer) def test_reconall(): input_map = dict(T1_files = dict(argstr='-i %s...',), args = dict(argstr='%s',), @@ -196,7 +196,7 @@ def test_reconall(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value, "key = %s"%key -@skipif(no_freesurfer) +@skipif(no_freesurfer) def test_resample(): input_map = dict(args = dict(argstr='%s',), environ = dict(), @@ -209,7 +209,7 @@ def test_resample(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value -@skipif(no_freesurfer) +@skipif(no_freesurfer) def test_smooth(): input_map = dict(args = dict(argstr='%s',), environ = dict(), @@ -227,7 +227,7 @@ def test_smooth(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value -@skipif(no_freesurfer) +@skipif(no_freesurfer) def test_unpacksdicomdir(): input_map = dict(args = dict(argstr='%s',), config = dict(mandatory=True,xor=('run_info', 'config', 'seq_config'),argstr='-cfg %s',), @@ -263,7 +263,7 @@ def create_files_in_directory(): nif.save(nif.Nifti1Image(img,np.eye(4),hdr), os.path.join(outdir,f)) return filelist, outdir, cwd - + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index b611271ac6..cf19091fa7 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -77,7 +77,7 @@ def test_sample2surf(): yield assert_equal, s2s.cmdline, ("mri_vol2surf " "--hemi lh --o %s --ref %s --projfrac 0.500 --mov %s" %(os.path.join(cwd, "lh.a.mgz"),files[1],files[0])) - + # Test identity s2sish = fs.SampleToSurface(source_file = files[1], reference_file = files[0],hemi="rh") yield assert_not_equal, s2s, s2sish @@ -96,7 +96,7 @@ def set_illegal_range(): @skipif(no_freesurfer) def test_surfsmooth(): - + smooth = fs.SurfaceSmooth() # Test underlying command @@ -176,7 +176,7 @@ def test_applymask(): indict = {input:filelist[0]} willbreak = fs.ApplyMask(**indict) yield 
assert_raises, ValueError, willbreak.run - + # Now test a basic command line masker.inputs.in_file = filelist[0] masker.inputs.mask_file = filelist[1] @@ -219,7 +219,7 @@ def test_surfshots(): # Test that the tcl script gets written fotos._write_tcl_script() - yield assert_equal, True, os.path.exists("snapshots.tcl") + yield assert_equal, True, os.path.exists("snapshots.tcl") # Test that we can use a different tcl script foo = open("other.tcl", "w").close() @@ -234,6 +234,6 @@ def test_surfshots(): os.environ["DISPLAY"] = hold_display except KeyError: pass - + # Clean up our mess clean_directory(cwd, oldwd) diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index b669811aca..cff0ce9930 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -24,12 +24,12 @@ FilterRegressor, Overlay, Slicer, PlotTimeSeries, PlotMotionParams, ConvertXFM, SwapDimensions, PowerSpectrum) -from nipype.interfaces.fsl.dti import (EddyCorrect, BEDPOSTX, DTIFit, +from nipype.interfaces.fsl.dti import (EddyCorrect, BEDPOSTX, DTIFit, ProbTrackX, VecReg, ProjThresh, FindTheBiggest, DistanceMap, - TractSkeleton, XFibres, + TractSkeleton, XFibres, MakeDyadicVectors) -from nipype.interfaces.fsl.maths import (ChangeDataType, Threshold, MeanImage, +from nipype.interfaces.fsl.maths import (ChangeDataType, Threshold, MeanImage, ApplyMask, IsotropicSmooth, TemporalFilter, DilateImage, ErodeImage, SpatialFilter, UnaryMaths, BinaryMaths, MultiImageMaths) diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py index d635ccfaa7..3969232441 100644 --- a/nipype/interfaces/fsl/base.py +++ b/nipype/interfaces/fsl/base.py @@ -67,7 +67,7 @@ def version(): """ # find which fsl being used....and get version from # /path/to/fsl/etc/fslversion - try: + try: basedir = os.environ['FSLDIR'] except KeyError: return None @@ -137,19 +137,19 @@ class FSLCommandInputSpec(CommandLineInputSpec): All command support specifying FSLOUTPUTTYPE dynamically via output_type. - + Example ------- fsl.ExtractRoi(tmin=42, tsize=1, output_type='NIFTI') """ output_type = traits.Enum('NIFTI', Info.ftypes.keys(), desc='FSL output type') - + class FSLCommand(CommandLine): """Base support for FSL commands. - + """ - + input_spec = FSLCommandInputSpec _output_type = None @@ -168,7 +168,7 @@ def __init__(self, **inputs): def _output_update(self): self._output_type = self.inputs.output_type self.inputs.environ.update({'FSLOUTPUTTYPE': self.inputs.output_type}) - + @classmethod def set_default_output_type(cls, output_type): """Set the default output type for FSL classes. 
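The output_type plumbing above works at two levels: set_default_output_type() changes the class-wide default, which _output_update() exports to the child process as FSLOUTPUTTYPE, while each instance can still override it through its own output_type input. A short sketch under those assumptions; fsl.BET is used only as a convenient FSLCommand subclass and 'structural.nii' is a hypothetical input file.

    >>> from nipype.interfaces import fsl
    >>> fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    >>> bet = fsl.BET(in_file='structural.nii') # doctest: +SKIP
    >>> bet.inputs.output_type = 'NIFTI' # doctest: +SKIP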
@@ -240,13 +240,13 @@ def no_fsl(): """Checks if FSL is NOT installed used with skipif to skip tests that will fail if FSL is not installed""" - + if Info.version() == None: return True else: return False - + def no_fsl_course_data(): """check if FSL_COURSE_DATA is defined and point to a valid directory""" - + return not ("FSL_COURSE_DATA" in os.environ and os.path.isdir(os.environ["FSL_COURSE_DATA"])) \ No newline at end of file diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index d135fda194..a9047752ce 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -24,13 +24,13 @@ warnings.filterwarnings('always', category=UserWarning) class DTIFitInputSpec(FSLCommandInputSpec): - + dwi = File(exists=True, desc = 'diffusion weighted image data file', argstr='-k %s', position=0, mandatory=True) base_name = traits.Str("dtifit_", desc = 'base_name that all output files will start with', argstr='-o %s', position=1, usedefault=True) mask = File(exists=True, desc = 'bet binary mask file', - argstr='-m %s', position=2, mandatory=True) + argstr='-m %s', position=2, mandatory=True) bvecs = File(exists=True, desc = 'b vectors file', argstr='-r %s', position=3, mandatory=True) bvals = File(exists=True,desc = 'b values file', @@ -49,7 +49,7 @@ class DTIFitInputSpec(FSLCommandInputSpec): argstr='--littlebit') class DTIFitOutputSpec(TraitedSpec): - + V1 = File(exists = True, desc = 'path/name of file with the 1st eigenvector') V2 = File(exists = True, desc = 'path/name of file with the 2nd eigenvector') V3 = File(exists = True, desc = 'path/name of file with the 3rd eigenvector') @@ -61,15 +61,15 @@ class DTIFitOutputSpec(TraitedSpec): MO = File(exists = True, desc = 'path/name of file with the mode of anisotropy') S0 = File(exists = True, desc = 'path/name of file with the raw T2 signal with no '+ 'diffusion weighting') - tensor = File(exists = True, desc = 'path/name of file with the 4D tensor volume') + tensor = File(exists = True, desc = 'path/name of file with the 4D tensor volume') class DTIFit(FSLCommand): """ Use FSL dtifit command for fitting a diffusion tensor model at each voxel - + Example ------- - + >>> from nipype.interfaces import fsl >>> dti = fsl.DTIFit() >>> dti.inputs.dwi = 'diffusion.nii' @@ -79,22 +79,22 @@ class DTIFit(FSLCommand): >>> dti.inputs.mask = 'mask.nii' >>> dti.cmdline 'dtifit -k diffusion.nii -o TP -m mask.nii -r bvecs -b bvals' - + """ - + _cmd = 'dtifit' input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec - - def _list_outputs(self): - outputs = self.output_spec().get() + + def _list_outputs(self): + outputs = self.output_spec().get() for k in outputs.keys(): if k not in ('outputtype','environ','args'): - if k != 'tensor' or (isdefined(self.inputs.save_tensor) + if k != 'tensor' or (isdefined(self.inputs.save_tensor) and self.inputs.save_tensor): outputs[k] = self._gen_fname(self.inputs.base_name,suffix = '_'+k) return outputs - + class EddyCorrectInputSpec(FSLCommandInputSpec): in_file = File(exists=True,desc = '4D input file',argstr='%s', position=0, mandatory=True) out_file = File(desc = '4D output file',argstr='%s', position=1, genfile=True) @@ -105,20 +105,20 @@ class EddyCorrectOutputSpec(TraitedSpec): class EddyCorrect(FSLCommand): """ Deprecated! 
Please use create_eddy_correct_pipeline instead - + Example ------- - + >>> from nipype.interfaces import fsl >>> eddyc = fsl.EddyCorrect(in_file='diffusion.nii',out_file="diffusion_edc.nii", ref_num=0) >>> eddyc.cmdline 'eddy_correct diffusion.nii diffusion_edc.nii 0' - + """ _cmd = 'eddy_correct' input_spec = EddyCorrectInputSpec output_spec = EddyCorrectOutputSpec - + def __init__(self, **inputs): warnings.warn("Deprecated: Please use create_eddy_correct_pipeline instead", DeprecationWarning) return super(EddyCorrect, self).__init__(**inputs) @@ -131,7 +131,7 @@ def _run_interface(self, runtime): self.raise_exception(runtime) return runtime - def _list_outputs(self): + def _list_outputs(self): outputs = self.output_spec().get() outputs['eddy_corrected'] = self.inputs.out_file if not isdefined(outputs['eddy_corrected']): @@ -144,21 +144,21 @@ def _gen_filename(self, name): else: return None -class BEDPOSTXInputSpec(FSLCommandInputSpec): +class BEDPOSTXInputSpec(FSLCommandInputSpec): dwi = File(exists=True, desc = 'diffusion weighted image data file',mandatory=True) - mask = File(exists=True, desc = 'bet binary mask file',mandatory=True) + mask = File(exists=True, desc = 'bet binary mask file',mandatory=True) bvecs = File(exists=True, desc = 'b vectors file',mandatory=True) bvals = File(exists=True,desc = 'b values file',mandatory=True) bpx_directory = Directory('bedpostx',argstr='%s',usedefault=True, desc='the name for this subject''s bedpostx folder') - + fibres = traits.Int(1,argstr='-n %d', desc='number of fibres per voxel') weight = traits.Float(1.00,argstr='-w %.2f', desc='ARD weight, more weight means less'+ ' secondary fibres per voxel') burn_period = traits.Int(1000,argstr='-b %d', desc='burnin period') jumps = traits.Int(1250,argstr='-j %d', desc='number of jumps') sampling = traits.Int(25,argstr='-s %d', desc='sample every') - + class BEDPOSTXOutputSpec(TraitedSpec): bpx_out_directory = Directory(exists=True, field='dir', desc = 'path/name of directory with all '+ 'bedpostx output files for this subject') @@ -179,38 +179,38 @@ class BEDPOSTXOutputSpec(TraitedSpec): desc='a list of path/name of 3D volume with mean of distribution on f anisotropy') dyads = traits.List(File, exists=True, desc='a list of path/name of mean of PDD distribution in vector form') - + class BEDPOSTX(FSLCommand): """ Deprecated! 
Please use create_bedpostx_pipeline instead - + Example ------- - + >>> from nipype.interfaces import fsl >>> bedp = fsl.BEDPOSTX(bpx_directory='subjdir', bvecs='bvecs', bvals='bvals', dwi='diffusion.nii', \ mask='mask.nii', fibres=1) >>> bedp.cmdline 'bedpostx subjdir -n 1' - + """ - + _cmd = 'bedpostx' input_spec = BEDPOSTXInputSpec output_spec = BEDPOSTXOutputSpec can_resume = True - + def __init__(self, **inputs): warnings.warn("Deprecated: Please use create_bedpostx_pipeline instead", DeprecationWarning) return super(BEDPOSTX, self).__init__(**inputs) def _run_interface(self, runtime): - - #create the subject specific bpx_directory + + #create the subject specific bpx_directory bpx_directory = os.path.join(os.getcwd(),self.inputs.bpx_directory) self.inputs.bpx_directory = bpx_directory if not os.path.exists(bpx_directory): os.makedirs(bpx_directory) - + _,_,ext = split_filename(self.inputs.mask) shutil.copyfile(self.inputs.mask, os.path.join(self.inputs.bpx_directory,'nodif_brain_mask'+ext)) _,_,ext = split_filename(self.inputs.dwi) @@ -223,23 +223,23 @@ def _run_interface(self, runtime): self.raise_exception(runtime) return runtime - def _list_outputs(self): + def _list_outputs(self): outputs = self.output_spec().get() outputs['bpx_out_directory'] = os.path.join(os.getcwd(),self.inputs.bpx_directory+'.bedpostX') outputs['xfms_directory'] = os.path.join(os.getcwd(),self.inputs.bpx_directory+'.bedpostX','xfms') - + for k in outputs.keys(): if k not in ('outputtype','environ','args','bpx_out_directory','xfms_directory'): outputs[k]=[] - - for n in range(self.inputs.fibres): + + for n in range(self.inputs.fibres): outputs['merged_thsamples'].append(self._gen_fname('merged_th'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) outputs['merged_phsamples'].append(self._gen_fname('merged_ph'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) - outputs['merged_fsamples'].append(self._gen_fname('merged_f'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) + outputs['merged_fsamples'].append(self._gen_fname('merged_f'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) outputs['mean_thsamples'].append(self._gen_fname('mean_th'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) outputs['mean_phsamples'].append(self._gen_fname('mean_ph'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) - outputs['mean_fsamples'].append(self._gen_fname('mean_f'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) - outputs['dyads'].append(self._gen_fname('dyads'+repr(n+1),suffix='',cwd=outputs['bpx_out_directory'])) + outputs['mean_fsamples'].append(self._gen_fname('mean_f'+repr(n+1)+'samples',suffix='',cwd=outputs['bpx_out_directory'])) + outputs['dyads'].append(self._gen_fname('dyads'+repr(n+1),suffix='',cwd=outputs['bpx_out_directory'])) return outputs @@ -252,11 +252,11 @@ class ProbTrackXInputSpec(FSLCommandInputSpec): mask = File(exists=True, desc='bet binary mask file in diffusion space', argstr='-m %s', mandatory=True) seed = traits.Either(File(exists=True), traits.List(File(exists=True)), traits.List(traits.List(traits.Int(), minlen=3, maxlen=3)), desc='seed volume(s), or voxel(s)'+ - 'or freesurfer label file',argstr='--seed=%s', mandatory=True) + 'or freesurfer label file',argstr='--seed=%s', mandatory=True) mode = traits.Enum("simple", "two_mask_symm", "seedmask", desc='options: simple (single seed voxel), seedmask (mask of seed voxels),'+ - 'twomask_symm (two bet binary masks) ', argstr='--mode=%s', 
genfile=True) + 'twomask_symm (two bet binary masks) ', argstr='--mode=%s', genfile=True) target_masks = InputMultiPath(File(exits=True),desc='list of target masks - '+ - 'required for seeds_to_targets classification', argstr='--targetmasks=%s') + 'required for seeds_to_targets classification', argstr='--targetmasks=%s') mask2 =File(exists=True,desc='second bet binary mask (in diffusion space) in twomask_symm mode', argstr='--mask2=%s') waypoints = File(exists=True, desc='waypoint mask or ascii list of waypoint masks - '+ @@ -282,16 +282,16 @@ class ProbTrackXInputSpec(FSLCommandInputSpec): avoid_mp = File(exists=True, desc='reject pathways passing through locations given by this mask', argstr='--avoid=%s') stop_mask = File(exists=True,argstr='--stop=%s', - desc='stop tracking at locations given by this mask file') + desc='stop tracking at locations given by this mask file') xfm = File(exists=True, argstr='--xfm=%s', desc='transformation matrix taking seed space to DTI space '+ - '(either FLIRT matrix or FNIRT warp_field) - default is identity') + '(either FLIRT matrix or FNIRT warp_field) - default is identity') inv_xfm = File( argstr='--invxfm=%s',desc='transformation matrix taking DTI space to seed'+ ' space (compulsory when using a warp_field for seeds_to_dti)') n_samples = traits.Int(5000, argstr='--nsamples=%d',desc='number of samples - default=5000', usedefault=True) n_steps = traits.Int(argstr='--nsteps=%d',desc='number of steps per sample - default=2000') dist_thresh = traits.Float(argstr='--distthresh=%.3f',desc='discards samples shorter than '+ - 'this threshold (in mm - default=0)') + 'this threshold (in mm - default=0)') c_thresh = traits.Float(argstr='--cthr=%.3f',desc='curvature threshold - default=0.2') sample_random_points = traits.Bool(argstr='--sampvox',desc='sample random points within seed voxels') step_length = traits.Float(argstr='--steplength=%.3f',desc='step_length in mm - default=0.5') @@ -319,13 +319,13 @@ class ProbTrackXOutputSpec(TraitedSpec): 'have not been rejected by inclusion/exclusion mask criteria') targets = traits.List(File,exists=True,desc='a list with all generated seeds_to_target files') particle_files = traits.List(File,exists=True) - + class ProbTrackX(FSLCommand): """ Use FSL probtrackx for tractography on bedpostx results - + Examples -------- - + >>> from nipype.interfaces import fsl >>> pbx = fsl.ProbTrackX(samples_base_name='merged', mask='mask.nii', \ seed='MASK_average_thal_right.nii', mode='seedmask', \ @@ -335,13 +335,13 @@ class ProbTrackX(FSLCommand): out_dir='.') >>> pbx.cmdline 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd --os2t --dir=. 
--samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' - + """ - + _cmd = 'probtrackx' input_spec = ProbTrackXInputSpec output_spec = ProbTrackXOutputSpec - + def __init__(self, **inputs): warnings.warn("Deprecated: Please use create_bedpostx_pipeline instead", DeprecationWarning) return super(ProbTrackX, self).__init__(**inputs) @@ -354,7 +354,7 @@ def _run_interface(self, runtime): copyfile(self.inputs.phsamples[i-1], self.inputs.samples_base_name + "_ph%dsamples"%i + ext, copy=False) _, _, ext = split_filename(self.inputs.thsamples[i-1]) copyfile(self.inputs.fsamples[i-1], self.inputs.samples_base_name + "_f%dsamples"%i + ext, copy=False) - + if isdefined(self.inputs.target_masks): f = open("targets.txt","w") for target in self.inputs.target_masks: @@ -368,42 +368,42 @@ def _run_interface(self, runtime): else: f.write("%s\n"%seed) f.close() - + runtime = super(ProbTrackX, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime - + def _format_arg(self, name, spec, value): if name == 'target_masks' and isdefined(value): - fname = "targets.txt" + fname = "targets.txt" return super(ProbTrackX, self)._format_arg(name, spec, [fname]) elif name == 'seed' and isinstance(value, list): fname = "seeds.txt" return super(ProbTrackX, self)._format_arg(name, spec, fname) else: return super(ProbTrackX, self)._format_arg(name, spec, value) - - def _list_outputs(self): - outputs = self.output_spec().get() + + def _list_outputs(self): + outputs = self.output_spec().get() if not isdefined(self.inputs.out_dir): out_dir = self._gen_filename("out_dir") else: out_dir = self.inputs.out_dir - - outputs['log'] = os.path.abspath(os.path.join(out_dir,'probtrackx.log')) + + outputs['log'] = os.path.abspath(os.path.join(out_dir,'probtrackx.log')) #utputs['way_total'] = os.path.abspath(os.path.join(out_dir,'waytotal')) if isdefined(self.inputs.opd == True): if isinstance(self.inputs.seed, list) and isinstance(self.inputs.seed[0], list): outputs['fdt_paths'] = [] for seed in self.inputs.seed: - outputs['fdt_paths'].append(os.path.abspath(self._gen_fname("fdt_paths_%s"%("_".join([str(s) for s in seed])), + outputs['fdt_paths'].append(os.path.abspath(self._gen_fname("fdt_paths_%s"%("_".join([str(s) for s in seed])), cwd=out_dir,suffix=''))) - else: + else: outputs['fdt_paths'] = os.path.abspath(self._gen_fname("fdt_paths", cwd=out_dir,suffix='')) - - # handle seeds-to-target output files + + # handle seeds-to-target output files if isdefined(self.inputs.target_masks): outputs['targets']=[] for target in self.inputs.target_masks: @@ -413,7 +413,7 @@ def _list_outputs(self): if isdefined(self.inputs.verbose) and self.inputs.verbose == 2: outputs['particle_files'] = [os.path.abspath(os.path.join(out_dir, 'particle%d'%i)) for i in range(self.inputs.n_samples) ] return outputs - + def _gen_filename(self, name): if name == "out_dir": return os.getcwd() @@ -423,19 +423,19 @@ def _gen_filename(self, name): else: return "seedmask" -class VecRegInputSpec(FSLCommandInputSpec): +class VecRegInputSpec(FSLCommandInputSpec): in_file = File(exists=True,argstr='-i %s',desc='filename for input vector or tensor field', - mandatory=True) + mandatory=True) out_file = File(argstr='-o %s',desc='filename for output registered vector or tensor field', genfile=True) ref_vol = File(exists=True,argstr='-r %s',desc='filename for reference (target) volume', - mandatory=True) + mandatory=True) affine_mat = File(exists=True,argstr='-t %s',desc='filename for affine 
transformation matrix') warp_field = File(exists=True,argstr='-w %s',desc='filename for 4D warp field for nonlinear registration') rotation_mat = File(exists=True,argstr='--rotmat=%s',desc='filename for secondary affine matrix'+ 'if set, this will be used for the rotation of the vector/tensor field') rotation_warp = File(exists=True,argstr='--rotwarp=%s',desc='filename for secondary warp field'+ - 'if set, this will be used for the rotation of the vector/tensor field') + 'if set, this will be used for the rotation of the vector/tensor field') interpolation = traits.Enum("nearestneighbour", "trilinear", "sinc", "spline",argstr='--interp=%s',desc='interpolation method : '+ 'nearestneighbour, trilinear (default), sinc or spline') mask = File(exists=True,argstr='-m %s',desc='brain mask in input space') @@ -444,15 +444,15 @@ class VecRegInputSpec(FSLCommandInputSpec): class VecRegOutputSpec(TraitedSpec): out_file = File(exists=True,desc='path/name of filename for the registered vector or tensor field') - + class VecReg(FSLCommand): """Use FSL vecreg for registering vector data For complete details, see the FDT Documentation - + Example ------- - + >>> from nipype.interfaces import fsl >>> vreg = fsl.VecReg(in_file='diffusion.nii', \ affine_mat='trans.mat', \ @@ -462,23 +462,23 @@ class VecReg(FSLCommand): 'vecreg -t trans.mat -i diffusion.nii -o diffusion_vreg.nii -r mni.nii' """ - + _cmd = 'vecreg' input_spec = VecRegInputSpec output_spec = VecRegOutputSpec - def _run_interface(self, runtime): + def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): - pth,base_name = os.path.split(self.inputs.in_file) + pth,base_name = os.path.split(self.inputs.in_file) self.inputs.out_file = self._gen_fname(base_name,cwd=os.path.abspath(pth), suffix = '_vreg') return super(VecReg, self)._run_interface(runtime) - - def _list_outputs(self): + + def _list_outputs(self): outputs = self.output_spec().get() outputs['out_file'] = self.inputs.out_file if not isdefined(outputs['out_file']) and isdefined(self.inputs.in_file): - pth,base_name = os.path.split(self.inputs.in_file) + pth,base_name = os.path.split(self.inputs.in_file) outputs['out_file'] = self._gen_fname(base_name,cwd=os.path.abspath(pth), suffix = '_vreg') return outputs @@ -487,7 +487,7 @@ def _gen_filename(self, name): if name is 'out_file': return self._list_outputs()[name] else: - return None + return None class ProjThreshInputSpec(FSLCommandInputSpec): in_files = traits.List(File,exists=True,argstr='%s',desc='a list of input volumes', @@ -495,18 +495,18 @@ class ProjThreshInputSpec(FSLCommandInputSpec): threshold = traits.Int(argstr='%d',desc='threshold indicating minimum '+ 'number of seed voxels entering this mask region', mandatory=True,position=1) - + class ProjThreshOuputSpec(TraitedSpec): out_files = traits.List(File,exists=True,desc='path/name of output volume after thresholding') - + class ProjThresh(FSLCommand): """Use FSL proj_thresh for thresholding some outputs of probtrack For complete details, see the FDT Documentation - + Example ------- - + >>> from nipype.interfaces import fsl >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] >>> pThresh = fsl.ProjThresh(in_files=ldir,threshold=3) @@ -514,38 +514,38 @@ class ProjThresh(FSLCommand): 'proj_thresh seeds_to_M1.nii seeds_to_M2.nii 3' """ - + _cmd = 'proj_thresh' input_spec = ProjThreshInputSpec output_spec = ProjThreshOuputSpec - def _list_outputs(self): + def _list_outputs(self): outputs = self.output_spec().get() outputs['out_files'] = [] for name in 
self.inputs.in_files: cwd,base_name = os.path.split(name) outputs['out_files'].append(self._gen_fname(base_name,cwd=cwd,suffix='_proj_seg_thr_'+ - repr(self.inputs.threshold))) + repr(self.inputs.threshold))) return outputs class FindTheBiggestInputSpec(FSLCommandInputSpec): in_files = traits.List(File,exists=True,argstr='%s',desc='a list of input volumes or a single matrix file', position=0,mandatory=True) - out_file = File(argstr='%s',desc='file with the resulting segmentation',position=2,genfile=True) - + out_file = File(argstr='%s',desc='file with the resulting segmentation',position=2,genfile=True) + class FindTheBiggestOutputSpec(TraitedSpec): out_file = File(exists=True,argstr='%s',desc='output file indexed in order of input files') - + class FindTheBiggest(FSLCommand): """ Use FSL find_the_biggest for performing hard segmentation on the outputs of connectivity-based thresholding in probtrack. For complete details, see the `FDT Documentation. `_ - + Example ------- - + >>> from nipype.interfaces import fsl >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] >>> fBig = fsl.FindTheBiggest(in_files=ldir, out_file='biggestSegmentation') @@ -553,17 +553,17 @@ class FindTheBiggest(FSLCommand): 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' """ - + _cmd='find_the_biggest' input_spec = FindTheBiggestInputSpec output_spec = FindTheBiggestOutputSpec - - def _run_interface(self, runtime): + + def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): self.inputs.out_file = self._gen_fname('biggestSegmentation',suffix='') return super(FindTheBiggest, self)._run_interface(runtime) - - def _list_outputs(self): + + def _list_outputs(self): outputs = self.output_spec().get() outputs['out_file'] = self.inputs.out_file if not isdefined(outputs['out_file']): @@ -580,7 +580,7 @@ class TractSkeletonInputSpec(FSLCommandInputSpec): in_file = File(exists=True, mandatory=True, argstr="-i %s", desc="input image (typically mean FA volume)") - _proj_inputs = ["threshold", "distance_map", "data_file"] + _proj_inputs = ["threshold", "distance_map", "data_file"] project_data = traits.Bool(argstr="-p %.3f %s %s %s %s",requires=_proj_inputs, desc="project data onto skeleton") threshold = traits.Float(desc="skeleton threshold value") @@ -592,7 +592,7 @@ class TractSkeletonInputSpec(FSLCommandInputSpec): desc="perform alternate search using built-in cingulum mask") data_file = File(exists=True, desc="4D data to project onto skeleton (usually FA)") alt_data_file = File(exists=True, argstr="-a %s", desc="4D non-FA data to project onto skeleton") - alt_skeleton = File(exists=True, argstr="-s %s", desc="alternate skeleton to use") + alt_skeleton = File(exists=True, argstr="-s %s", desc="alternate skeleton to use") projected_data = File(desc="input data projected onto skeleton") skeleton_file = traits.Either(traits.Bool, File, argstr="-o %s", desc="write out skeleton image") @@ -603,13 +603,13 @@ class TractSkeletonOutputSpec(TraitedSpec): class TractSkeleton(FSLCommand): """Use FSL's tbss_skeleton to skeletonise an FA image or project arbitrary values onto a skeleton. - + There are two ways to use this interface. To create a skeleton from an FA image, just supply the ``in_file`` and set ``skeleton_file`` to True (or specify a skeleton filename). To project values onto a skeleton, you must set ``project_data`` to True, and then also supply values for ``threshold``, ``distance_map``, and ``data_file``.
The ``search_mask_file`` and ``use_cingulum_mask`` inputs are also used in data projection, but ``use_cingulum_mask`` - is set to True by default. This mask controls where the projection algorithm searches + is set to True by default. This mask controls where the projection algorithm searches within a circular space around a tract, rather than in a single perpendicular direction. Examples -------- @@ -645,7 +645,7 @@ def _format_arg(self, name, spec, value): if isinstance(value, bool): return spec.argstr%self._list_outputs()["skeleton_file"] else: - return spec.argstr%value + return spec.argstr%value return super(TractSkeleton, self)._format_arg(name, spec, value) def _list_outputs(self): @@ -670,7 +670,7 @@ def _list_outputs(self): newpath=os.getcwd(), use_ext=True) return outputs - + class DistanceMapInputSpec(FSLCommandInputSpec): @@ -678,7 +678,7 @@ class DistanceMapInputSpec(FSLCommandInputSpec): desc="image to calculate distance values for") mask_file = File(exists=True,argstr="--mask=%s", desc="binary mask to constrain calculations") - invert_input = traits.Bool(argstr="--invert", desc="invert input image") + invert_input = traits.Bool(argstr="--invert", desc="invert input image") local_max_file = traits.Either(traits.Bool, File, argstr="--localmax=%s", desc="write an image of the local maxima") distance_map = File(genfile=True,argstr="--out=%s",desc="distance map to write") @@ -759,11 +759,11 @@ class XFibresInputSpec(FSLCommandInputSpec): model = traits.Int(argstr="--model=%d", desc="Which model to use. \ 1=mono-exponential (default and required for single shell). 2=continuous \ exponential (for multi-shell experiments)") - + _xor_inputs1 = ('no_ard', 'all_ard') no_ard = traits.Bool(argstr="--noard", desc="Turn ARD off on all fibres", xor=_xor_inputs1) all_ard = traits.Bool(argstr="--allard", desc="Turn ARD on on all fibres", xor=_xor_inputs1) - + _xor_inputs2 = ('no_spat', 'non_linear') no_spat = traits.Bool(argstr="--nospat", desc="Initialise with tensor, not spatially", xor=_xor_inputs2) non_linear = traits.Bool(argstr="--nonlinear", desc="Initialise with nonlinear fitting", xor=_xor_inputs2) @@ -771,7 +771,7 @@ class XFibresInputSpec(FSLCommandInputSpec): 'do not add + to make a new directory',argstr='--forcedir', usedefault=True) class XFibresOutputSpec(TraitedSpec): - dyads = OutputMultiPath(File(exists=True), desc="Mean of PDD distribution in vector form.") + dyads = OutputMultiPath(File(exists=True), desc="Mean of PDD distribution in vector form.") fsamples = OutputMultiPath(File(exists=True), desc="Samples from the distribution on anisotropic volume fraction") mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath(File(exists=True), desc="Mean of distribution on f anisotropy") @@ -785,13 +785,13 @@ class XFibres(FSLCommand): _cmd = "xfibres" input_spec = XFibresInputSpec output_spec = XFibresOutputSpec - + def _run_interface(self,runtime): runtime = super(XFibres, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime - + def _list_outputs(self): outputs = self.output_spec().get() outputs["mean_dsamples"] = self._gen_fname("mean_dsamples", cwd=self.inputs.logdir) @@ -807,9 +807,9 @@ def _list_outputs(self): outputs["mean_fsamples"].append(self._gen_fname("mean_f%dsamples"%i, cwd=self.inputs.logdir)) outputs["phsamples"].append(self._gen_fname("ph%dsamples"%i, cwd=self.inputs.logdir)) outputs["thsamples"].append(self._gen_fname("th%dsamples"%i, cwd=self.inputs.logdir)) - + return outputs -
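Putting the two TractSkeleton usage modes described above side by side (the projection call mirrors test_tbss_skeleton later in this patch; the search mask defaults to FSL's standard LowerCingulum_1mm image, and the two resolved paths are abbreviated here as placeholders):

>>> from nipype.interfaces import fsl
>>> skeletor = fsl.TractSkeleton(in_file='a.nii', skeleton_file=True)  # skeletonise an FA image
>>> bones = fsl.TractSkeleton(in_file='a.nii', project_data=True,      # project values onto a skeleton
...                           threshold=0.2, distance_map='b.nii', data_file='b.nii')
>>> bones.cmdline  # doctest: +SKIP
'tbss_skeleton -i a.nii -p 0.200 b.nii <LowerCingulum_1mm path> b.nii <projected output path>'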
+ class MakeDyadicVectorsInputSpec(FSLCommandInputSpec): theta_vol = File(exists=True, mandatory=True, position=0, argstr="%s") phi_vol = File(exists=True, mandatory=True, position=1, argstr="%s") @@ -817,15 +817,15 @@ class MakeDyadicVectorsInputSpec(FSLCommandInputSpec): output = File("dyads", position=3, usedefault=True, argstr="%s") perc = traits.Float(desc="the {perc}% angle of the output cone of \ uncertainty (output will be in degrees)", position=4, argstr="%f") - + class MakeDyadicVectorsOutputSpec(TraitedSpec): dyads = File(exists=True) dispersion = File(exists=True) - + class MakeDyadicVectors(FSLCommand): """Create vector volume representing mean principal diffusion direction and its uncertainty (dispersion)""" - + _cmd = "make_dyadic_vectors" input_spec = MakeDyadicVectorsInputSpec output_spec = MakeDyadicVectorsOutputSpec @@ -834,5 +834,5 @@ def _list_outputs(self): outputs = self.output_spec().get() outputs["dyads"] = self._gen_fname(self.inputs.output) outputs["dispersion"] = self._gen_fname(self.inputs.output, suffix="_dispersion") - - return outputs + + return outputs diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py index 5450885e7f..e7c0f50797 100644 --- a/nipype/interfaces/fsl/maths.py +++ b/nipype/interfaces/fsl/maths.py @@ -13,7 +13,7 @@ class MathsInput(FSLCommandInputSpec): - + in_file = File(position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on") out_file = File(genfile=True, position=-2, argstr="%s", desc="image to write") @@ -57,7 +57,7 @@ class ChangeDataTypeInput(MathsInput): class ChangeDataType(MathsCommand): """Use fslmaths to change the datatype of an image. - + """ input_spec = ChangeDataTypeInput _suffix = "_chdt" @@ -74,7 +74,7 @@ class ThresholdInputSpec(MathsInput): class Threshold(MathsCommand): """Use fslmaths to apply a threshold to an image in a variety of ways. - + """ input_spec = ThresholdInputSpec _suffix = "_thresh" @@ -166,7 +166,7 @@ def _format_arg(self, name, spec, value): return super(DilateImage, self)._format_arg(name, spec, value) class ErodeInput(KernelInput): - + minimum_filter = traits.Bool(argstr="%s", position=5, usedefault=True, default_value=False, desc="if true, minimum filter rather than erosion by zeroing-out") @@ -191,13 +191,13 @@ class SpatialFilterInput(KernelInput): class SpatialFilter(MathsCommand): """Use fslmaths to spatially filter an image. 
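MakeDyadicVectors above ships without a usage example; a minimal hedged sketch (the position-2 input is elided by the hunk boundary and is omitted here, and the ``output`` trait falls back to its 'dyads' default):

>>> from nipype.interfaces import fsl
>>> mdv = fsl.MakeDyadicVectors(theta_vol='merged_th1samples.nii.gz',
...                             phi_vol='merged_ph1samples.nii.gz')
>>> mdv.cmdline  # doctest: +SKIP
'make_dyadic_vectors merged_th1samples.nii.gz merged_ph1samples.nii.gz dyads'
>>> # _list_outputs then reports the 'dyads' volume plus 'dyads_dispersion'.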
- + """ input_spec = SpatialFilterInput _suffix = "_filt" class UnaryMathsInput(MathsInput): - + operation = traits.Enum("exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index", argstr="-%s", position=3, mandatory=True, desc="operation to perform") @@ -258,7 +258,7 @@ def _format_arg(self, name, spec, value): return super(MultiImageMaths, self)._format_arg(name, spec, value) class TemporalFilterInput(MathsInput): - + lowpass_sigma = traits.Float(-1, argstr="%.6f", position=4, usedefault=True, desc="lowpass filter sigma (in volumes)") highpass_sigma = traits.Float(-1, argstr="-bptf %.6f", position=3, usedefault=True, diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 51ee15d443..2c9e0e3635 100755 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -82,7 +82,7 @@ class Level1Design(BaseInterface): Examples -------- - + >>> level1design = Level1Design() >>> level1design.inputs.interscan_interval = 2.5 >>> level1design.inputs.bases = {'dgamma':{'derivs': False}} @@ -475,7 +475,7 @@ class FILMGLS(FSLCommand): Examples -------- - + Initialize with no options, assigning them when calling run: >>> from nipype.interfaces import fsl @@ -503,7 +503,7 @@ class FILMGLS(FSLCommand): _cmd = 'film_gls' input_spec = FILMGLSInputSpec output_spec = FILMGLSOutputSpec - + def _get_pe_files(self, cwd): files = None if isdefined(self.inputs.design_file): @@ -638,7 +638,7 @@ class FLAMEO(FSLCommand): Examples -------- - + Initialize FLAMEO with no options, assigning them when calling run: >>> from nipype.interfaces import fsl @@ -760,7 +760,7 @@ class ContrastMgr(FSLCommand): _cmd = 'contrast_mgr' input_spec = ContrastMgrInputSpec output_spec = ContrastMgrOutputSpec - + def _run_interface(self, runtime): # The returncode is meaningless in ContrastMgr. So check the output # in stderr and if it's set, then update the returncode @@ -949,7 +949,7 @@ class MultipleRegressDesign(BaseInterface): FSL does not demean columns for higher level analysis. Please see `FSL documentation `_ - for more details on model specification for higher level analysis. + for more details on model specification for higher level analysis. 
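Stepping back to the maths.py hunk: the TemporalFilter spec above folds both sigmas into a single -bptf call, with -1 leaving that band unfiltered. This usage is lifted from test_tempfilt later in this patch (the auto-generated output path is abbreviated):

>>> from nipype.interfaces import fsl
>>> filt = fsl.TemporalFilter(in_file='a.nii', highpass_sigma=64)
>>> filt.cmdline  # doctest: +SKIP
'fslmaths a.nii -bptf 64.000000 -1.000000 a_filt.nii'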
Examples -------- @@ -959,7 +959,7 @@ class MultipleRegressDesign(BaseInterface): >>> model.inputs.contrasts = [['group mean','T',['reg1'],[1]]] >>> model.inputs.regressors = dict(reg1=[1,1,1],reg2=[2.,-4,3]) >>> model.run() # doctest: +SKIP - + """ input_spec = MultipleRegressDesignInputSpec @@ -1009,7 +1009,7 @@ def _run_interface(self, runtime): for idx in sorted(tconmap.keys()): convals = np.zeros((nwaves,1)) for regidx, reg in enumerate(self.inputs.contrasts[idx][2]): - convals[regs.index(reg)] = self.inputs.contrasts[idx][3][regidx] + convals[regs.index(reg)] = self.inputs.contrasts[idx][3][regidx] con_txt.append(' '.join(['%e'%val for val in convals])) con_txt = '\n'.join(con_txt) # write f-con file @@ -1175,7 +1175,7 @@ class MELODIC(FSLCommand): >>> melodic_setup.inputs.out_dir = 'groupICA.out' >>> melodic_setup.run() # doctest: +SKIP - + """ input_spec = MELODICInputSpec output_spec = MELODICOutputSpec @@ -1189,11 +1189,11 @@ def _list_outputs(self): if isdefined(self.inputs.report) and self.inputs.report: outputs['report_dir'] = os.path.join(self._gen_filename("out_dir"), "report") return outputs - + def _gen_filename(self, name): if name == "out_dir": return os.getcwd() - + class SmoothEstimateInputSpec(FSLCommandInputSpec): dof = traits.Int(argstr='--dof=%d', mandatory=True, @@ -1219,13 +1219,13 @@ class SmoothEstimate(FSLCommand): Examples -------- - + >>> est = SmoothEstimate() >>> est.inputs.zstat_file = 'zstat1.nii.gz' >>> est.inputs.mask_file = 'mask.nii' >>> est.cmdline 'smoothest --mask=mask.nii --zstat=zstat1.nii.gz' - + """ input_spec = SmoothEstimateInputSpec @@ -1308,25 +1308,25 @@ class ClusterOutputSpec(TraitedSpec): max_file = File(desc='filename for output of max image') mean_file = File(desc='filename for output of mean image') pval_file = File(desc='filename for image output of log pvals') - + class Cluster(FSLCommand): """ Uses FSL cluster to perform clustering on statistical output Examples -------- - + >>> cl = Cluster() >>> cl.inputs.threshold = 2.3 >>> cl.inputs.in_file = 'zstat1.nii.gz' >>> cl.inputs.out_localmax_txt_file = 'stats.txt' >>> cl.cmdline 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000' - + """ input_spec = ClusterInputSpec output_spec = ClusterOutputSpec _cmd = 'cluster' - + filemap = {'out_index_file':'index', 'out_threshold_file':'threshold', 'out_localmax_txt_file': 'localmax.txt', 'out_localmax_vol_file': 'localmax', @@ -1359,8 +1359,8 @@ def _format_arg(self, name, spec, value): fname = value return spec.argstr % fname return super(Cluster, self)._format_arg(name, spec, value) - -class RandomiseInputSpec(FSLCommandInputSpec): + +class RandomiseInputSpec(FSLCommandInputSpec): in_file = File(exists=True,desc = '4D input file',argstr='-i %s', position=0, mandatory=True) base_name = traits.Str('tbss_',desc = 'the rootname that all generated files will have', argstr='-o %s', position=1, usedefault=True) @@ -1368,7 +1368,7 @@ class RandomiseInputSpec(FSLCommandInputSpec): tcon = File(exists=True,desc = 't contrasts file',argstr='-t %s', position=3, mandatory=True) fcon = File(exists=True,desc = 'f contrasts file',argstr='-f %s') mask = File(exists=True,desc = 'mask image',argstr='-m %s') - x_block_labels = File(exists=True,desc = 'exchangeability block labels file',argstr='-e %s') + x_block_labels = File(exists=True,desc = 'exchangeability block labels file',argstr='-e %s') demean = traits.Bool(desc = 'demean data temporally before model fitting', argstr='-D') one_sample_group_mean = traits.Bool(desc = 'perform 1-sample 
group-mean test instead of generic permutation test', argstr='-l') @@ -1381,27 +1381,27 @@ class RandomiseInputSpec(FSLCommandInputSpec): tfce = traits.Bool(desc = 'carry out Threshold-Free Cluster Enhancement', argstr='-T') tfce2D = traits.Bool(desc = 'carry out Threshold-Free Cluster Enhancement with 2D optimisation', argstr='--T2') - f_only = traits.Bool(desc = 'calculate f-statistics only', argstr='--f_only') + f_only = traits.Bool(desc = 'calculate f-statistics only', argstr='--f_only') raw_stats_imgs = traits.Bool(desc = 'output raw ( unpermuted ) statistic images', argstr='-R') p_vec_n_dist_files = traits.Bool(desc = 'output permutation vector and null distribution text files', argstr='-P') num_perm = traits.Int(argstr='-n %d', desc='number of permutations (default 5000, set to 0 for exhaustive)') seed = traits.Int(argstr='--seed %d', desc='specific integer seed for random number generator') - var_smooth = traits.Int(argstr='-v %d', desc='use variance smoothing (std is in mm)') + var_smooth = traits.Int(argstr='-v %d', desc='use variance smoothing (std is in mm)') c_thresh = traits.Float(argstr='-c %.2f', desc='carry out cluster-based thresholding') cm_thresh = traits.Float(argstr='-C %.2f', desc='carry out cluster-mass-based thresholding') f_c_thresh = traits.Float(argstr='-F %.2f', desc='carry out f cluster thresholding') - f_cm_thresh = traits.Float(argstr='-S %.2f', desc='carry out f cluster-mass thresholding') + f_cm_thresh = traits.Float(argstr='-S %.2f', desc='carry out f cluster-mass thresholding') tfce_H = traits.Float(argstr='--tfce_H %.2f', desc='TFCE height parameter (default=2)') tfce_E = traits.Float(argstr='--tfce_E %.2f', desc='TFCE extent parameter (default=0.5)') - tfce_C = traits.Float(argstr='--tfce_C %.2f', desc='TFCE connectivity (6 or 26; default=6)') + tfce_C = traits.Float(argstr='--tfce_C %.2f', desc='TFCE connectivity (6 or 26; default=6)') vxl = traits.List(traits.Int,argstr='--vxl %d', desc='list of numbers indicating voxelwise EVs'+ 'position in the design matrix (list order corresponds to files in vxf option)') vxf = traits.List(traits.Int,argstr='--vxf %d', desc='list of 4D images containing voxelwise EVs'+ '(list order corresponds to numbers in vxl option)') - + class RandomiseOutputSpec(TraitedSpec): - tstat1_file = File(exists=True,desc = 'path/name of tstat image corresponding to the first t contrast') + tstat1_file = File(exists=True,desc = 'path/name of tstat image corresponding to the first t contrast') class Randomise(FSLCommand): """XXX UNSTABLE DO NOT USE @@ -1409,7 +1409,7 @@ class Randomise(FSLCommand): FSL Randomise: feeds the 4D projected FA data into GLM modelling and thresholding in order to find voxels which correlate with your model - + Example ------- >>> import nipype.interfaces.fsl as fsl @@ -1419,14 +1419,14 @@ class Randomise(FSLCommand): design_mat='design.mat') >>> rand.cmdline 'randomise -i allFA.nii -o tbss_ -d design.mat -t design.con -m mask.nii' - + """ - + _cmd = 'randomise' input_spec = RandomiseInputSpec output_spec = RandomiseOutputSpec - - def _list_outputs(self): - outputs = self.output_spec().get() + + def _list_outputs(self): + outputs = self.output_spec().get() outputs['tstat1_file'] = self._gen_fname(self.inputs.base_name,suffix='_tstat1') return outputs diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index e35ae80745..eb6b898af2 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -763,8 +763,8 @@ class FNIRT(FSLCommand): _cmd = 
'fnirt' input_spec = FNIRTInputSpec output_spec = FNIRTOutputSpec - - + + filemap = {'warped_file':'warped', 'field_file':'field', 'jacobian_file':'field_jacobian', @@ -801,7 +801,7 @@ def _format_arg(self, name, spec, value): if name in self.filemap.keys(): return spec.argstr % self._list_outputs()[name] return super(FNIRT, self)._format_arg(name, spec, value) - + def _gen_filename(self, name): if name in ['warped_file', 'log_file']: return self._list_outputs()[name] @@ -988,7 +988,7 @@ class SUSAN(FSLCommand): Examples -------- - + >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> print anatfile #doctest: +SKIP @@ -1104,17 +1104,17 @@ class FUGUE(FSLCommand): -------- Please insert examples for use of this command - + """ - + _cmd = 'fugue' input_spec = FUGUEInputSpec output_spec = FUGUEOutputSpec - + def __init__(self, **kwargs): super(FUGUE, self).__init__(**kwargs) warn('This interface has not been fully tested. Please report any failures.') - + def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.unwarped_file @@ -1179,18 +1179,18 @@ class PRELUDE(FSLCommand): Examples -------- - + Please insert examples for use of this command - + """ input_spec = PRELUDEInputSpec output_spec = PRELUDEOutputSpec _cmd = 'prelude' - + def __init__(self, **kwargs): super(PRELUDE, self).__init__(**kwargs) warn('This has not been fully tested. Please report any failures.') - + def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.unwrapped_phase_file diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py index e8a058749e..306be2974c 100644 --- a/nipype/interfaces/fsl/tests/test_base.py +++ b/nipype/interfaces/fsl/tests/test_base.py @@ -8,7 +8,7 @@ from nipype.interfaces.base import InterfaceResult from nipype.interfaces.fsl import check_fsl, no_fsl - + @skipif(no_fsl)#skip if fsl not installed) def test_fslversion(): ver = fsl.Info.version() @@ -16,13 +16,13 @@ def test_fslversion(): # If ver is None, fsl is not installed ver = ver.split('.') yield assert_equal, ver[0], '4' - + @skipif(no_fsl)#skip if fsl not installed) def test_fsloutputtype(): types = fsl.Info.ftypes.keys() orig_out_type = fsl.Info.output_type() yield assert_true, orig_out_type in types - + def test_outputtype_to_ext(): for ftype, ext in fsl.Info.ftypes.items(): @@ -30,7 +30,7 @@ def test_outputtype_to_ext(): yield assert_equal, res, ext yield assert_raises, KeyError, fsl.Info.output_type_to_ext, 'JUNK' - + @skipif(no_fsl)#skip if fsl not installed) def test_FSLCommand(): # Most methods in FSLCommand are tested in the subclasses. 
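The FUGUE docstring above still reads "Please insert examples for use of this command"; a hedged sketch of a typical call (only ``unwarped_file`` is visible in this hunk, so ``in_file``, ``fmap_in_file``, and ``dwell_time`` are assumed trait names modelled on the fugue command-line options):

>>> from nipype.interfaces import fsl
>>> fugue = fsl.FUGUE()
>>> fugue.inputs.in_file = 'epi.nii'            # assumed trait name
>>> fugue.inputs.fmap_in_file = 'fieldmap.nii'  # assumed trait name
>>> fugue.inputs.dwell_time = 0.0007            # assumed trait name
>>> fugue.inputs.unwarped_file = 'epi_unwarped.nii'
>>> res = fugue.run()  # doctest: +SKIP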
Only diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index b8202d500d..8621506268 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -196,7 +196,7 @@ def create_files_in_directory(): nb.save(nb.Nifti1Image(img,np.eye(4),hdr), os.path.join(outdir,f)) return filelist, outdir, cwd - + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) @@ -232,7 +232,7 @@ def test_bedpostx2(): yield assert_equal, actualCmdline, desiredCmdline - + # test eddy_correct @skipif(no_fsl) def test_eddy_correct2(): @@ -389,7 +389,7 @@ def test_randomise2(): '6,7,8,9,3')} for name, settings in opt_map.items(): - rand4 = fsl.Randomise(input_4D='infile', output_rootname='root', + rand4 = fsl.Randomise(input_4D='infile', output_rootname='root', **{name: settings[1]}) yield assert_equal, rand4.cmdline, rand4.cmd + ' -i infile -o root ' \ + settings[0] @@ -465,15 +465,15 @@ def test_Randomise_parallel(): 'TFCE_extent_param': ('--tfce_E 0.50', 0.50), 'TFCE_connectivity': ('--tfce_C 0.30', 0.30), 'list_num_voxel_EVs_pos': ('--vxl ' \ - + repr([1, 2, 3, 4]), + + repr([1, 2, 3, 4]), repr([1, 2, 3, 4])), 'list_img_voxel_EVs': ('--vxf ' \ - + repr([6, 7, 8, 9, 3]), + + repr([6, 7, 8, 9, 3]), repr([6, 7, 8, 9, 3]))} for name, settings in opt_map.items(): - rand4 = fsl.Randomise_parallel(input_4D='infile', - output_rootname='root', + rand4 = fsl.Randomise_parallel(input_4D='infile', + output_rootname='root', **{name: settings[1]}) yield assert_equal, rand4.cmdline, rand4.cmd + ' -i infile -o root ' \ + settings[0] @@ -529,7 +529,7 @@ def test_Proj_thresh(): yield assert_equal, results.interface.inputs.threshold, 2 # test arguments for opt_map - # Proj_thresh doesn't have an opt_map{} + # Proj_thresh doesn't have an opt_map{} # test vec_reg @@ -588,7 +588,7 @@ def test_Vec_reg(): 'brainMask': ('-m mask', 'mask')} for name, settings in opt_map.items(): - vrg4 = fsl.VecReg(infile='infile', outfile='outfile', + vrg4 = fsl.VecReg(infile='infile', outfile='outfile', refVolName='MNI152', **{name: settings[1]}) yield assert_equal, vrg4.cmdline, vrg4.cmd + \ ' -i infile -o outfile -r MNI152 ' + settings[0] @@ -627,7 +627,7 @@ def test_tbss_skeleton(): skeletor = fsl.TractSkeleton() files, newdir, olddir = create_files_in_directory() - + # Test the underlying command yield assert_equal, skeletor.cmd, "tbss_skeleton" @@ -648,7 +648,7 @@ def test_tbss_skeleton(): # Now test the more complicated usage bones = fsl.TractSkeleton(in_file="a.nii", project_data=True) - + # This should error yield assert_raises, ValueError, bones.run @@ -656,7 +656,7 @@ def test_tbss_skeleton(): bones.inputs.threshold = 0.2 bones.inputs.distance_map = "b.nii" bones.inputs.data_file = "b.nii" # Even though that's silly - + # Now we get a command line yield assert_equal, bones.cmdline, \ "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s"%(Info.standard_image("LowerCingulum_1mm.nii.gz"), diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index 395f5dd0d8..297b80a47d 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -43,7 +43,7 @@ def clean_directory(testdir, origdir, ftype): def test_maths_base(): files, testdir, origdir, ftype = create_files_in_directory() - # Get some fslmaths + # Get some fslmaths maths = fsl.MathsCommand() # Test that we got what we wanted @@ -82,7 +82,7 @@ def test_maths_base(): def test_changedt(): files, testdir, 
origdir, ftype = create_files_in_directory() - # Get some fslmaths + # Get some fslmaths cdt = fsl.ChangeDataType() # Test that we got what we wanted @@ -141,7 +141,7 @@ def test_threshold(): # Clean up our mess clean_directory(testdir, origdir, ftype) - + @skipif(no_fsl) def test_meanimage(): @@ -168,7 +168,7 @@ def test_meanimage(): # Clean up our mess clean_directory(testdir, origdir, ftype) - + @skipif(no_fsl) def test_smooth(): files, testdir, origdir, ftype = create_files_in_directory() @@ -186,18 +186,18 @@ def test_smooth(): cmdline = "fslmaths a.nii -s %.5f b.nii" for val in [0,1.,1,25,0.5,8/3]: smoother = fsl.IsotropicSmooth(in_file="a.nii",out_file="b.nii",sigma=val) - yield assert_equal, smoother.cmdline, cmdline%val + yield assert_equal, smoother.cmdline, cmdline%val smoother = fsl.IsotropicSmooth(in_file="a.nii",out_file="b.nii",fwhm=val) val = float(val)/np.sqrt(8 * np.log(2)) yield assert_equal, smoother.cmdline, cmdline%val - + # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) yield assert_equal, smoother.cmdline, "fslmaths a.nii -s %.5f %s"%(5, os.path.join(testdir, "a_smooth.nii")) # Clean up our mess clean_directory(testdir, origdir, ftype) - + @skipif(no_fsl) def test_mask(): files, testdir, origdir, ftype = create_files_in_directory() @@ -256,14 +256,14 @@ def test_dilation(): diller.inputs.kernel_size = Undefined diller.inputs.kernel_file = "kernel.txt" yield assert_equal, diller.cmdline, "fslmaths a.nii -kernel file kernel.txt -dilF b.nii" - + # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") yield assert_equal, dil.cmdline, "fslmaths a.nii -dilF %s"%os.path.join(testdir, "a_dil.nii") # Clean up our mess clean_directory(testdir, origdir, ftype) - + @skipif(no_fsl) def test_erosion(): files, testdir, origdir, ftype = create_files_in_directory() @@ -337,7 +337,7 @@ def test_unarymaths(): for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %s"%(op, os.path.join(testdir, "a_%s.nii"%op)) - + # Clean up our mess clean_directory(testdir, origdir, ftype) @@ -372,7 +372,7 @@ def test_binarymaths(): for op in ops: maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii %s"%(op,os.path.join(testdir,"a_maths.nii")) - + # Clean up our mess clean_directory(testdir, origdir, ftype) @@ -403,7 +403,7 @@ def test_multimaths(): maths = fsl.MultiImageMaths(in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"]) yield assert_equal, maths.cmdline, \ "fslmaths a.nii -add b.nii -mul 5 %s"%os.path.join(testdir,"a_maths.nii") - + # Clean up our mess clean_directory(testdir, origdir, ftype) @@ -429,10 +429,10 @@ def test_tempfilt(): yield assert_equal, filt.cmdline, "fslmaths a.nii -bptf %.6f %.6f b.nii"%win # Test that we don't need to ask for an out file - filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma = 64) + filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma = 64) yield assert_equal, filt.cmdline, \ "fslmaths a.nii -bptf 64.000000 -1.000000 %s"%os.path.join(testdir,"a_filt.nii") - + # Clean up our mess clean_directory(testdir, origdir, ftype) diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 4334f2c103..cebe451a1e 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -94,7 +94,7 @@ def func(): 
realcmd = ' '.join([better.cmd, tmp_infile, outpath, settings[0]]) yield assert_equal, better.cmdline, realcmd teardown_infile(tmp_dir) - + # test fast @skipif(no_fsl) def test_fast(): @@ -155,7 +155,7 @@ def test_fast(): settings[0], "-S 1 %s"%tmp_infile]) teardown_infile(tmp_dir) -@skipif(no_fsl) +@skipif(no_fsl) def setup_flirt(): ext = Info.output_type_to_ext(Info.output_type()) tmpdir = tempfile.mkdtemp() @@ -170,7 +170,7 @@ def teardown_flirt(tmpdir): def test_flirt(): # setup tmpdir, infile, reffile = setup_flirt() - + flirter = fsl.FLIRT() yield assert_equal, flirter.cmd, 'flirt' @@ -178,7 +178,7 @@ def test_flirt(): flirter.inputs.cost = 'mutualinfo' flirted = fsl.FLIRT(in_file=infile, reference=reffile, - out_file='outfile', out_matrix_file='outmat.mat', + out_file='outfile', out_matrix_file='outmat.mat', bins = 256, cost = 'mutualinfo') flirt_est = fsl.FLIRT(in_file=infile, reference=reffile, @@ -269,7 +269,7 @@ def test_flirt(): @skipif(no_fsl) def test_mcflirt(): tmpdir, infile, reffile = setup_flirt() - + frt = fsl.MCFLIRT() yield assert_equal, frt.cmd, 'mcflirt' # Test generated outfile name @@ -306,7 +306,7 @@ def test_mcflirt(): for name, settings in opt_map.items(): fnt = fsl.MCFLIRT(in_file = infile, **{name : settings[1]}) - instr = '-in %s'%(infile) + instr = '-in %s'%(infile) outstr = '-out %s'%(outfile) if name in ('init', 'cost', 'dof','mean_vol','bins'): yield assert_equal, fnt.cmdline, ' '.join([fnt.cmd, @@ -318,7 +318,7 @@ def test_mcflirt(): instr, outstr, settings[0]]) - + # Test error is raised when missing required args fnt = fsl.MCFLIRT() @@ -351,7 +351,7 @@ def test_fnirt(): cmd = 'fnirt --in=%s '\ '--logout=%s'\ ' %s=%s --ref=%s'\ - ' --iout=%s' % (infile, log, + ' --iout=%s' % (infile, log, flag, strval, reffile, iout) elif item in ('in_fwhm'): cmd = 'fnirt --in=%s %s=%s --logout=%s '\ @@ -362,14 +362,14 @@ def test_fnirt(): '--in=%s '\ '--logout=%s '\ '--ref=%s --iout=%s' % (flag,strval, - infile, log, + infile, log, reffile, iout) else: cmd = 'fnirt '\ '--in=%s --logout=%s '\ - '--ref=%s %s=%s --iout=%s' % (infile, log, + '--ref=%s %s=%s --iout=%s' % (infile, log, reffile, flag, strval, iout) @@ -399,43 +399,43 @@ def test_fnirt(): fnirt = fsl.FNIRT(in_file = infile, ref_file = reffile, **{name : infile}) - + if name in ('config_file', 'affine_file','field_file'): cmd = 'fnirt %s%s --in=%s '\ '--logout=%s '\ - '--ref=%s --iout=%s' % (settings, infile, infile, log, + '--ref=%s --iout=%s' % (settings, infile, infile, log, reffile, iout) elif name in ('refmask_file'): cmd = 'fnirt --in=%s '\ '--logout=%s --ref=%s '\ '%s%s '\ - '--iout=%s' % (infile, log, - reffile, + '--iout=%s' % (infile, log, + reffile, settings,infile, iout) elif name in ('in_intensitymap_file', 'inwarp_file', 'inmask_file', 'jacobian_file'): cmd = 'fnirt --in=%s '\ '%s%s '\ '--logout=%s --ref=%s '\ - '--iout=%s' % (infile, + '--iout=%s' % (infile, settings,infile, - log, - reffile, + log, + reffile, iout) elif name in ('log_file'): cmd = 'fnirt --in=%s '\ '%s%s --ref=%s '\ - '--iout=%s' % (infile, - settings,infile, - reffile, + '--iout=%s' % (infile, + settings,infile, + reffile, iout) else: cmd = 'fnirt --in=%s '\ '--logout=%s %s%s '\ - '--ref=%s --iout=%s' % (infile,log, + '--ref=%s --iout=%s' % (infile,log, settings, infile, reffile,iout) - + yield assert_equal, fnirt.cmdline, cmd teardown_flirt(tmpdir) diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index c1e7a415e3..97d5b89617 100644 --- 
a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -27,7 +27,7 @@ def create_files_in_directory(): nb.save(nb.Nifti1Image(img,np.eye(4),hdr), os.path.join(outdir,f)) return filelist, outdir, cwd - + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) @@ -135,7 +135,7 @@ def no_fsl(): """Checks if FSL is NOT installed used with skipif to skip tests that will fail if FSL is not installed""" - + if fsl.Info().version() == None: return True else: @@ -144,7 +144,7 @@ def no_fsl(): @skipif(no_fsl) def test_fslroi(): filelist, outdir, cwd = create_files_in_directory() - + roi = fsl.ExtractROI() # make sure command gets called @@ -265,7 +265,7 @@ def test_slicer(): clean_directory(outdir, cwd) def create_parfiles(): - + np.savetxt('a.par',np.random.rand(6,3)) np.savetxt('b.par',np.random.rand(6,3)) return ['a.par', 'b.par'] diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 33723d283b..d9eac1a172 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -133,7 +133,7 @@ def _list_outputs(self): suffix = '_merged') else: outputs['merged_file'] = os.path.realpath(self.inputs.merged_file) - + return outputs def _gen_filename(self, name): @@ -281,10 +281,10 @@ class ImageMaths(FSLCommand): """Use FSL fslmaths command to allow mathematical manipulation of images `FSL info `_ - + Examples -------- - + >>> from nipype.interfaces import fsl >>> from nipype.testing import anatfile >>> maths = fsl.ImageMaths(in_file=anatfile, op_string= '-add 5', \ @@ -336,7 +336,7 @@ class FilterRegressorOutputSpec(TraitedSpec): class FilterRegressor(FSLCommand): """Data de-noising by regressing out part of a design matrix - + Uses simple OLS regression on 4D images """ input_spec = FilterRegressorInputSpec @@ -385,10 +385,10 @@ class ImageStats(FSLCommand): """Use FSL fslstats command to calculate stats from images `FSL info `_ - + Examples -------- - + >>> from nipype.interfaces.fsl import ImageStats >>> from nipype.testing import funcfile >>> stats = ImageStats(in_file=funcfile, op_string= '-M') @@ -411,7 +411,7 @@ def _format_arg(self, name, trait_spec, value): else: raise ValueError('-k %s option in op_string requires mask_file') return super(ImageStats, self)._format_arg(name, trait_spec, value) - + def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() # local caching for backward compatibility @@ -561,7 +561,7 @@ class SlicerInputSpec(FSLCommandInputSpec): image_width = traits.Int(position=-2, argstr='%d',desc='max picture width') out_file = File(position=-1, genfile=True, argstr='%s', desc='picture to write') scaling = traits.Float(position=0, argstr='-s %f',desc='image scale') - + class SlicerOutputSpec(TraitedSpec): out_file = File(exists=True, desc='picture to write') @@ -638,7 +638,7 @@ class PlotTimeSeriesInputSpec(FSLCommandInputSpec): out_file = File(argstr="-o %s",genfile=True,desc="image to write") class PlotTimeSeriesOutputSpec(TraitedSpec): - + out_file = File(exists=True, desc='image to write') class PlotTimeSeries(FSLCommand): @@ -732,9 +732,9 @@ class PlotMotionParams(FSLCommand): ----- The 'in_source' attribute determines the order of columns that are expected in the source file. FSL prints motion parameters in the order rotations, translations, - while SPM prints them in the opposite order. This interface should be able to + while SPM prints them in the opposite order. 
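FilterRegressor above ("regressing out part of a design matrix") likewise has no example; a hedged sketch in which every trait name is an assumption mirroring fsl_regfilt's -i/-d/-f/-o options:

>>> from nipype.interfaces import fsl
>>> freg = fsl.FilterRegressor()
>>> freg.inputs.in_file = 'functional.nii'   # assumed trait name
>>> freg.inputs.design_file = 'design.mat'   # assumed trait name
>>> freg.inputs.filter_columns = [1, 2]      # assumed trait name
>>> freg.inputs.out_file = 'denoised.nii'    # assumed trait name
>>> res = freg.run()  # doctest: +SKIP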
This interface should be able to plot timecourses of motion parameters generated from other sources as long as - they fall under one of these two patterns. For more flexibility, see the + they fall under one of these two patterns. For more flexibility, see the :class:`fsl.PlotTimeSeries` interface. """ @@ -746,7 +746,7 @@ def _format_arg(self, name, spec, value): if name == "plot_type": source = self.inputs.in_source - + if self.inputs.plot_type == 'displacement': title='-t \'MCFLIRT estimated mean displacement (mm)\'' labels = '-a abs,rel' @@ -754,14 +754,14 @@ def _format_arg(self, name, spec, value): # Get the right starting and ending position depending on source package sfdict = dict(fsl_rot=(1,3),fsl_tra=(4,6),spm_rot=(4,6),spm_tra=(1,3)) - + # Format the title properly sfstr = "--start=%d --finish=%d"%sfdict["%s_%s"%(source, value[:3])] titledict = dict(fsl="MCFLIRT",spm="Realign") unitdict = dict(rot="radians",tra="mm") - + title = "\'%s estimated %s (%s)\'"%(titledict[source],value,unitdict[value[:3]]) - + return "-t %s %s -a x,y,z"%(title, sfstr) elif name == "plot_size": return "-h %d -w %d"%value @@ -816,7 +816,7 @@ class ConvertXFMOutputSpec(TraitedSpec): class ConvertXFM(FSLCommand): """Use the FSL utility convert_xfm to modify FLIRT transformation matrices. - + Examples -------- >>> import nipype.interfaces.fsl as fsl @@ -903,7 +903,7 @@ def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None - + class PowerSpectrumInputSpec(FSLCommandInputSpec): # We use position args here as list indices - so a negative number # will put something on the end @@ -912,7 +912,7 @@ class PowerSpectrumInputSpec(FSLCommandInputSpec): argstr='%s', position=0, mandatory=True) out_file = File(desc = 'name of output 4D file for power spectrum', argstr='%s', position=1, genfile=True) - + class PowerSpectrumOutputSpec(TraitedSpec): out_file = File(exists=True, desc="path/name of the output 4D power spectrum file") diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index c1214dd60e..8c0c0ea6c0 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -95,7 +95,7 @@ def add_traits(base, names, trait_type=None): return base class IOBase(BaseInterface): - def _run_interface(self,runtime): return runtime @@ -1051,15 +1051,15 @@ class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class SQLiteSink(IOBase): """Very simple frontend for storing values into SQLite database. input_names correspond to the column names in the target table. - + Notes ----- Unlike most nipype-nodes this is not a thread-safe node because it can - write to a common shared location. When run in parallel it will + write to a common shared location. When run in parallel it will occasionally crash.
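Tracing the PlotMotionParams._format_arg logic above for an MCFLIRT parameter file (``in_source`` and ``plot_type`` come from the code shown; ``in_file`` is an assumed trait name):

>>> from nipype.interfaces import fsl
>>> plot = fsl.PlotMotionParams(in_source='fsl', plot_type='rotations')
>>> plot.inputs.in_file = 'mc.par'  # assumed trait name
>>> # per the sfdict/titledict logic, plot_type expands to:
>>> # -t 'MCFLIRT estimated rotations (radians)' --start=1 --finish=3 -a x,y,z
>>> res = plot.run()  # doctest: +SKIP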
- - + + Examples -------- @@ -1069,12 +1069,12 @@ class SQLiteSink(IOBase): >>> sql.inputs.subject_id = 's1' >>> sql.inputs.some_measurement = 11.4 >>> sql.run() # doctest: +SKIP - + """ input_spec = SQLiteSinkInputSpec - + def __init__(self, input_names, **inputs): - + super(SQLiteSink, self).__init__(**inputs) self._input_names = filename_to_list(input_names) @@ -1085,7 +1085,7 @@ def _list_outputs(self): """ conn = sqlite3.connect(self.inputs.database_file, check_same_thread = False) c = conn.cursor() - c.execute("INSERT OR REPLACE INTO %s ("%self.inputs.table_name + ",".join(self._input_names) + ") VALUES (" + ",".join(["?"]*len(self._input_names)) + ")", + c.execute("INSERT OR REPLACE INTO %s ("%self.inputs.table_name + ",".join(self._input_names) + ") VALUES (" + ",".join(["?"]*len(self._input_names)) + ")", [getattr(self.inputs,name) for name in self._input_names]) conn.commit() c.close() diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index c992ca6d41..650d5ad6be 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -9,7 +9,7 @@ class MatlabInputSpec(CommandLineInputSpec): """ Basic expected inputs to Matlab interface """ - + script = traits.Str(argstr='-r \"%s;exit\"', desc='m-code to run', mandatory=True, position=-1) uses_mcr = traits.Bool(desc='use MCR interface', @@ -52,7 +52,7 @@ class MatlabCommand(CommandLine): _default_mfile = None _default_paths = None input_spec = MatlabInputSpec - + def __init__(self, matlab_cmd = None, **inputs): """initializes interface to matlab (default 'matlab -nodesktop -nosplash') @@ -62,18 +62,18 @@ def __init__(self, matlab_cmd = None, **inputs): self._cmd = matlab_cmd elif self._default_matlab_cmd: self._cmd = self._default_matlab_cmd - + if self._default_mfile and not isdefined(self.inputs.mfile): self.inputs.mfile = self._default_mfile - + if self._default_paths and not isdefined(self.inputs.paths): self.inputs.paths = self._default_paths - + if not isdefined(self.inputs.single_comp_thread) and \ not isdefined(self.inputs.uses_mcr): if config.getboolean('execution','single_thread_matlab'): self.inputs.single_comp_thread = True - + @classmethod def set_default_matlab_cmd(cls, matlab_cmd): """Set the default MATLAB command line for MATLAB classes. @@ -84,7 +84,7 @@ def set_default_matlab_cmd(cls, matlab_cmd): .inputs.matlab_cmd. """ cls._default_matlab_cmd = matlab_cmd - + @classmethod def set_default_mfile(cls, mfile): """Set the default MATLAB script file format for MATLAB classes. @@ -95,7 +95,7 @@ def set_default_mfile(cls, mfile): .inputs.mfile. """ cls._default_mfile = mfile - + @classmethod def set_default_paths(cls, paths): """Set the default MATLAB paths for MATLAB classes. 
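A short sketch of the class-level defaults described in the docstrings above; set once near import time, they apply to every MatlabCommand (and subclass) constructed afterwards:

>>> from nipype.interfaces.matlab import MatlabCommand
>>> MatlabCommand.set_default_matlab_cmd('matlab -nodesktop -nosplash')
>>> mlab = MatlabCommand(script="disp('hello')", mfile=False)
>>> res = mlab.run()  # doctest: +SKIP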
@@ -130,7 +130,7 @@ def _gen_matlab_command(self, argstr, script_lines): # prescript prescript = self.inputs.prescript postscript = self.inputs.postscript - + #postcript takes different default value depending on the mfile argument if mfile: prescript.insert(0,"fprintf(1,'Executing %s at %s:\\n',mfilename,datestr(now));") @@ -138,7 +138,7 @@ def _gen_matlab_command(self, argstr, script_lines): prescript.insert(0,"fprintf(1,'Executing code at %s:\\n',datestr(now));") for path in paths: prescript.append("addpath('%s');\n" % path) - + if not mfile: #clean up the code of comments and replace newlines with commas script_lines = ','.join([line for line in script_lines.split("\n") if not line.strip().startswith("%")]) diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index 2842e825bd..9fbaf97291 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -23,17 +23,17 @@ class ComputeMaskInputSpec(BaseInterfaceInputSpec): mean volume is used.") m = traits.Float(desc="lower fraction of the histogram to be discarded") M = traits.Float(desc="upper fraction of the histogram to be discarded") - cc = traits.Bool(desc="if True, only the largest connect component is kept") - + cc = traits.Bool(desc="if True, only the largest connect component is kept") + class ComputeMaskOutputSpec(TraitedSpec): brain_mask = File(exists=True) - + class ComputeMask(BaseInterface): input_spec = ComputeMaskInputSpec output_spec = ComputeMaskOutputSpec - + def _run_interface(self, runtime): - + args = {} for key in [k for k,_ in self.inputs.items() if k not in BaseInterfaceInputSpec().trait_names()]: value = getattr(self.inputs, key) @@ -42,14 +42,14 @@ def _run_interface(self, runtime): nii = nb.load(value) value = nii.get_data() args[key] = value - + brain_mask = compute_mask(**args) - + self._brain_mask_path = os.path.abspath("brain_mask.nii") nb.save(nb.Nifti1Image(brain_mask.astype(np.uint8), nii.get_affine()), self._brain_mask_path) - + return runtime - + def _list_outputs(self): outputs = self._outputs().get() outputs["brain_mask"] = self._brain_mask_path diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 33241a0e5b..b9eff8a364 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -2,10 +2,10 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """ -Interfaces to functionality from nitime for time-series analysis of fmri data +Interfaces to functionality from nitime for time-series analysis of fmri data -- nitime.analysis.CoherenceAnalyzer: Coherence/y -- nitime.fmri.io: +- nitime.analysis.CoherenceAnalyzer: Coherence/y +- nitime.fmri.io: - nitime.viz.drawmatrix_channels """ @@ -44,7 +44,7 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): 'time-points on the rows. 
ROI names at the top row'), exists=True, requires=('TR',)) - + #If you gave just a file name, you need to specify the sampling_rate: TR = traits.Float(desc=('The TR used to collect the data', 'in your csv file ')) @@ -59,7 +59,7 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): n_overlap = traits.Range(low=0,value=0,usedefault=True, desc=('The number of samples which overlap', 'between subsequent windows.(Defaults to 0)')) - + frequency_range = traits.List(value=[0.02, 0.15],usedefault=True, minlen=2, maxlen=2, @@ -68,7 +68,7 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): '[low,high] (Default [0.02,0.15]')) output_csv_file = File(desc='File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}') - + output_figure_file = File(desc='File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,...') figure_type = traits.Enum('matrix','network',usedefault=True, @@ -76,11 +76,11 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): "'matrix' denotes a matrix image and", "'network' denotes a graph representation.", " Default: 'matrix'")) - + class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_array = traits.Array(desc=('The pairwise coherence values', 'between the ROIs')) - + timedelay_array = traits.Array(desc=('The pairwise time delays between the', 'ROIs (in seconds)')) @@ -93,7 +93,7 @@ class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_fig = File(desc = ('Figure representing coherence values')) timedelay_fig = File(desc = ('Figure representing coherence values')) - + class CoherenceAnalyzer(BaseInterface): input_spec = CoherenceAnalyzerInputSpec @@ -108,7 +108,7 @@ def _read_csv(self): the rest of the data will be read in and transposed so that the rows (TRs) will becomes the second (and last) dimension of the array - + """ #Check that input conforms to expectations: first_row = open(self.inputs.in_file).readline() @@ -118,22 +118,22 @@ def _read_csv(self): roi_names = open(self.inputs.in_file).readline().replace('\"','').strip('\n').split(',') #Transpose, so that the time is the last dimension: data = np.loadtxt(self.inputs.in_file,skiprows=1,delimiter=',').T - + return data,roi_names - + def _csv2ts(self): """ Read data from the in_file and generate a nitime TimeSeries object""" data,roi_names = self._read_csv() - + TS = TimeSeries(data=data, sampling_interval=self.inputs.TR, time_unit='s') - + TS.metadata = dict(ROIs=roi_names) return TS - - + + #Rewrite _run_interface, but not run def _run_interface(self, runtime): lb, ub = self.inputs.frequency_range @@ -141,7 +141,7 @@ def _run_interface(self, runtime): if self.inputs.in_TS is Undefined: # get TS form csv and inputs.TR TS = self._csv2ts() - + else: # get TS from inputs.in_TS TS = self.inputs.in_TS @@ -151,7 +151,7 @@ def _run_interface(self, runtime): self.ROIs=['roi_%d' % x for x,_ in enumerate(TS.data)] else: self.ROIs=TS.metadata['ROIs'] - + A = nta.CoherenceAnalyzer(TS, method=dict(this_method='welch', NFFT=self.inputs.NFFT, @@ -159,27 +159,27 @@ def _run_interface(self, runtime): freq_idx = np.where((A.frequencies>self.inputs.frequency_range[0]) * (A.frequenciesCA.inputs.frequency_range[0]) * (C.frequencies>> a = SPMCommand()._reformat_dict_for_savemat(dict(a=1,b=dict(c=2,d=3))) >>> print a [{'a': 1, 'b': [{'c': 2, 'd': 3}]}] - + """ newdict = {} try: @@ -301,7 +301,7 @@ def _reformat_dict_for_savemat(self, contents): # if value is None, skip else: newdict[key] = value - + 
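        # At this point every key of 'newdict' maps either to a plain value
        # or, for a nested dict, to the one-element list returned by the
        # recursive call above; scipy.io.savemat renders such a list as a
        # 1x1 MATLAB struct array, which is exactly the shape the doctest
        # above shows: [{'a': 1, 'b': [{'c': 2, 'd': 3}]}].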
return [newdict] except TypeError: print 'Requires dict input' @@ -312,12 +312,12 @@ def _generate_job(self, prefix='', contents=None): Parameters ---------- prefix : string - A string that needs to get + A string that needs to get contents : dict A non-tuple Python structure containing spm job information gets converted to an appropriate sequence of matlab commands. - + """ jobstring = '' if contents is None: @@ -362,7 +362,7 @@ def _generate_job(self, prefix='', contents=None): return jobstring jobstring += "%s = %s;\n" % (prefix,str(contents)) return jobstring - + def _make_matlab_command(self, contents, postscript=None): """Generates a mfile to build job structure Parameters @@ -379,7 +379,7 @@ def _make_matlab_command(self, contents, postscript=None): ------- mscript : string contents of a script called by matlab - + """ cwd = os.getcwd() mscript = """ @@ -391,7 +391,7 @@ def _make_matlab_command(self, contents, postscript=None): fprintf('SPM version: %s Release: %s\\n',name, ver); fprintf('SPM path: %s\\n',which('spm')); spm('Defaults','fMRI'); - + if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');end\n """ if self.mlab.inputs.mfile: @@ -409,10 +409,10 @@ def _make_matlab_command(self, contents, postscript=None): (contents[0])}]}]} savemat(os.path.join(cwd,'pyjobs_%s.mat'%self.jobname), jobdef) mscript += "load pyjobs_%s;\n\n" % self.jobname - mscript += """ - if strcmp(spm('ver'),'SPM8'), + mscript += """ + if strcmp(spm('ver'),'SPM8'), jobs=spm_jobman('spm5tospm8',{jobs}); - end + end spm_jobman(\'run_nogui\',jobs);\n """ if postscript is not None: diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index d4af63e82c..887f9d20a3 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -34,17 +34,17 @@ class Level1DesignInputSpec(SPMCommandInputSpec): spm_mat_dir = Directory(exists=True, field='dir', desc='directory to store SPM.mat file (opt)') - timing_units = traits.Enum('secs', 'scans', field='timing.units', + timing_units = traits.Enum('secs', 'scans', field='timing.units', desc='units for specification of onsets', mandatory=True) - interscan_interval = traits.Float(field='timing.RT', + interscan_interval = traits.Float(field='timing.RT', desc='Interscan interval in secs', mandatory=True) microtime_resolution = traits.Int(field='timing.fmri_t', desc='Number of time-bins per scan in secs (opt)') microtime_onset = traits.Float(field='timing.fmri_t0', desc='The onset/time-bin in seconds for alignment (opt)') - session_info = traits.Any(field='sess', + session_info = traits.Any(field='sess', desc='Session specific information generated by ``modelgen.SpecifyModel``', mandatory=True) factor_info = traits.List(traits.Dict(traits.Enum('name','levels')), @@ -55,7 +55,7 @@ class Level1DesignInputSpec(SPMCommandInputSpec): name : string Name of basis function (hrf, fourier, fourier_han, gamma, fir) - + hrf : derivs : 2-element list Model HRF Derivatives. 
No derivatives: [0,0], @@ -84,19 +84,19 @@ class Level1DesignOutputSpec(TraitedSpec): class Level1Design(SPMCommand): """Generate an SPM design matrix - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=61 Examples -------- - + >>> level1design = Level1Design() >>> level1design.inputs.timing_units = 'secs' >>> level1design.inputs.interscan_interval = 2.5 >>> level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} >>> level1design.inputs.session_info = 'session_info.npz' >>> level1design.run() # doctest: +SKIP - + """ input_spec = Level1DesignInputSpec @@ -154,10 +154,10 @@ def _list_outputs(self): class EstimateModelInputSpec(SPMCommandInputSpec): - spm_mat_file = File(exists=True, field='spmmat', desc='absolute path to SPM.mat', + spm_mat_file = File(exists=True, field='spmmat', desc='absolute path to SPM.mat', copyfile=True, mandatory=True) - estimation_method = traits.Dict(traits.Enum('Classical', 'Bayesian2', 'Bayesian'), + estimation_method = traits.Dict(traits.Enum('Classical', 'Bayesian2', 'Bayesian'), field='method', desc='Classical, Bayesian2, Bayesian (dict)', mandatory=True) @@ -172,9 +172,9 @@ class EstimateModelOutputSpec(TraitedSpec): class EstimateModel(SPMCommand): """Use spm_spm to estimate the parameters of a model - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=71 - + Examples -------- >>> est = EstimateModel() @@ -226,8 +226,8 @@ def _list_outputs(self): return outputs class EstimateContrastInputSpec(SPMCommandInputSpec): - spm_mat_file = File(exists=True, field='spmmat', - desc='Absolute path to SPM.mat', + spm_mat_file = File(exists=True, field='spmmat', + desc='Absolute path to SPM.mat', copyfile=True, mandatory=True) contrasts = traits.List( @@ -257,11 +257,11 @@ class EstimateContrastInputSpec(SPMCommandInputSpec): contrasts, the condition list should contain previously defined T-contrasts.""", mandatory=True) - beta_images = InputMultiPath(File(exists=True), - desc='Parameter estimates of the design matrix', + beta_images = InputMultiPath(File(exists=True), + desc='Parameter estimates of the design matrix', copyfile=False, mandatory=True) - residual_image = File(exists=True, desc='Mean-squared image of the residuals', + residual_image = File(exists=True, desc='Mean-squared image of the residuals', copyfile=False, mandatory=True) use_derivs = traits.Bool(desc='use derivatives for estimation', @@ -289,7 +289,7 @@ class EstimateContrast(SPMCommand): >>> contrasts = [cont1,cont2] >>> est.inputs.contrasts = contrasts >>> est.run() # doctest: +SKIP - + """ input_spec = EstimateContrastInputSpec @@ -404,7 +404,7 @@ class Threshold(SPMCommand): '''Topological FDR thresholding based on cluster extent/size. Smoothness is estimated from GLM residuals but is assumed to be the same for all of the voxels. 
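Level1Design, EstimateModel and EstimateContrast are meant to be chained on a single SPM.mat. A hedged sketch of the full pass (the file names, the `{'Classical': 1}` method value and the contrast tuple are illustrative; EstimateContrast also needs beta_images and residual_image before run()):

>>> from nipype.interfaces import spm
>>> design = spm.Level1Design(timing_units='secs', interscan_interval=2.5,
...                           bases={'hrf': {'derivs': [0, 0]}},
...                           session_info='session_info.npz')
>>> design.run()  # doctest: +SKIP
>>> est = spm.EstimateModel(estimation_method={'Classical': 1})
>>> est.inputs.spm_mat_file = 'SPM.mat'  # doctest: +SKIP
>>> est.run()                            # doctest: +SKIP
>>> con = spm.EstimateContrast(spm_mat_file='SPM.mat')  # doctest: +SKIP
>>> con.inputs.contrasts = [('Task>Baseline', 'T', ['Task'], [1.0])]
>>> con.run()  # doctest: +SKIP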
- + Examples -------- @@ -421,11 +421,11 @@ class Threshold(SPMCommand): def _gen_thresholded_map_filename(self): _, fname, ext = split_filename(self.inputs.stat_image) return os.path.abspath(fname + "_thr" + ext) - + def _gen_pre_topo_map_filename(self): _, fname, ext = split_filename(self.inputs.stat_image) return os.path.abspath(fname + "_pre_topo_thr" + ext) - + def _make_matlab_command(self, _): script = "con_index = %d;\n"%self.inputs.contrast_index script += "cluster_forming_thr = %f;\n"%self.inputs.height_threshold @@ -433,7 +433,7 @@ def _make_matlab_command(self, _): script += "thresDesc = 'FWE';\n" else: script += "thresDesc = 'none';\n" - + if self.inputs.use_topo_fdr: script += "use_topo_fdr = 1;\n" else: @@ -490,7 +490,7 @@ def _make_matlab_command(self, _): for i = 1:nclusters cluster_size = sum(voxel_labels==i); - if cluster_size > extent_threshold && (~use_topo_fdr || cluster_size >= uc) + if cluster_size > extent_threshold && (~use_topo_fdr || cluster_size >= uc) thresholded_XYZ = cat(2, thresholded_XYZ, XYZth(:,voxel_labels == i)); thresholded_Z = cat(2, thresholded_Z, Zth(voxel_labels == i)); end @@ -511,7 +511,7 @@ def _list_outputs(self): outputs['thresholded_map'] = self._gen_thresholded_map_filename() outputs['pre_topo_fdr_map'] = self._gen_pre_topo_map_filename() return outputs - + class ThresholdStatisticsInputSpec(SPMCommandInputSpec): spm_mat_file = File(exists=True, desc='absolute path to SPM.mat', copyfile=True, mandatory=True) stat_image = File(exists=True, desc='stat image', copyfile=False, mandatory=True) @@ -530,9 +530,9 @@ class ThresholdStatisticsOutputSpec(TraitedSpec): class ThresholdStatistics(SPMCommand): - '''Given height and cluster size threshold calculate theoretical probabilities + '''Given height and cluster size threshold calculate theoretical probabilities concerning false positives - + Examples -------- @@ -584,7 +584,7 @@ def _make_matlab_command(self, _): voxelwise_P_uncor = (1 - spm_Fcdf(cluster_forming_thr,df)).^n end VPs = sort(VPs); - + voxelwise_P_FDR = spm_P_FDR(cluster_forming_thr,df,STAT,n,VPs) V2R = 1/prod(FWHM(stat_map_vol.dim > 1)); @@ -656,13 +656,13 @@ class FactorialDesignOutputSpec(TraitedSpec): class FactorialDesign(SPMCommand): """Base class for factorial designs - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=79 - + """ input_spec = FactorialDesignInputSpec - output_spec = FactorialDesignOutputSpec + output_spec = FactorialDesignOutputSpec _jobtype = 'stats' _jobname = 'factorial_design' @@ -691,13 +691,13 @@ def _parse_inputs(self): if not isdefined(self.inputs.spm_mat_dir): einputs[0]['dir'] = np.array([str(os.getcwd())], dtype=object) return einputs - + def _list_outputs(self): outputs = self._outputs().get() spm = os.path.join(os.getcwd(), 'SPM.mat') outputs['spm_mat_file'] = spm return outputs - + class OneSampleTTestDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List(File(exists=True), field='des.t1.scans', mandatory=True, minlen=2, @@ -708,14 +708,14 @@ class OneSampleTTestDesign(FactorialDesign): Examples -------- - + >>> ttest = OneSampleTTestDesign() >>> ttest.inputs.in_files = ['cont1.nii', 'cont2.nii'] >>> ttest.run() # doctest: +SKIP """ - + input_spec = OneSampleTTestDesignInputSpec - + def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ @@ -739,16 +739,16 @@ class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): class TwoSampleTTestDesign(FactorialDesign): """Create SPM design for two sample t-test - + Examples -------- - + >>> ttest = 
TwoSampleTTestDesign() >>> ttest.inputs.group1_files = ['cont1.nii', 'cont2.nii'] >>> ttest.inputs.group2_files = ['cont1a.nii', 'cont2a.nii'] >>> ttest.run() # doctest: +SKIP """ - + input_spec = TwoSampleTTestDesignInputSpec def _format_arg(self, opt, spec, val): @@ -770,22 +770,22 @@ class PairedTTestDesignInputSpec(FactorialDesignInputSpec): class PairedTTestDesign(FactorialDesign): """Create SPM design for paired t-test - + Examples -------- - + >>> pttest = PairedTTestDesign() >>> pttest.inputs.paired_files = [['cont1.nii','cont1a.nii'],['cont2.nii','cont2a.nii']] >>> pttest.run() # doctest: +SKIP """ - + input_spec = PairedTTestDesignInputSpec def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ if opt in ['paired_files']: - return [dict(scans=np.array(files, dtype=object)) for files in val] + return [dict(scans=np.array(files, dtype=object)) for files in val] return super(PairedTTestDesign, self)._format_arg(opt, spec, val) class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): @@ -804,15 +804,15 @@ class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): class MultipleRegressionDesign(FactorialDesign): """Create SPM design for multiple regression - + Examples -------- - + >>> mreg = MultipleRegressionDesign() >>> mreg.inputs.in_files = ['cont1.nii','cont2.nii'] >>> mreg.run() # doctest: +SKIP """ - + input_spec = MultipleRegressionDesignInputSpec def _format_arg(self, opt, spec, val): diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index e86de9f378..1dd6c96427 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -21,7 +21,7 @@ # Local imports from nipype.interfaces.base import (OutputMultiPath, TraitedSpec, isdefined, traits, InputMultiPath, File) -from nipype.interfaces.spm.base import (SPMCommand, scans_for_fname, +from nipype.interfaces.spm.base import (SPMCommand, scans_for_fname, func_is_3d, scans_for_fnames, SPMCommandInputSpec) from nipype.utils.filemanip import (fname_presuffix, filename_to_list, @@ -54,7 +54,7 @@ class SliceTimingOutputSpec(TraitedSpec): class SliceTiming(SPMCommand): """Use spm to perform slice timing correction. 
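slice_order is the 1-based acquisition order of the slices. The example below uses a descending sequential order, range(32,0,-1); for comparison, a bottom-up interleaved acquisition would look like this hedged sketch (assuming odd slices are acquired first):

>>> n_slices = 32
>>> interleaved = list(range(1, n_slices + 1, 2)) + list(range(2, n_slices + 1, 2))
>>> interleaved[:6]
[1, 3, 5, 7, 9, 11]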
- + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=19 Examples @@ -69,12 +69,12 @@ class SliceTiming(SPMCommand): >>> st.inputs.slice_order = range(32,0,-1) >>> st.inputs.ref_slice = 1 >>> st.run() # doctest: +SKIP - + """ input_spec = SliceTimingInputSpec output_spec = SliceTimingOutputSpec - + _jobtype = 'temporal' _jobname = 'st' @@ -144,7 +144,7 @@ class RealignOutputSpec(TraitedSpec): class Realign(SPMCommand): """Use spm_realign for estimating within modality rigid body alignment - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=25 Examples @@ -201,7 +201,7 @@ def _list_outputs(self): first_image = self.inputs.in_files[0][0] else: first_image = self.inputs.in_files[0] - + outputs['mean_image'] = fname_presuffix(first_image, prefix='mean') outputs['realigned_files'] = [] for imgf in filename_to_list(self.inputs.in_files): @@ -255,18 +255,18 @@ class CoregisterOutputSpec(TraitedSpec): class Coregister(SPMCommand): """Use spm_coreg for estimating cross-modality rigid body alignment - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=39 Examples -------- - + >>> import nipype.interfaces.spm as spm >>> coreg = spm.Coregister() >>> coreg.inputs.target = 'functional.nii' >>> coreg.inputs.source = 'structural.nii' >>> coreg.run() # doctest: +SKIP - + """ input_spec = CoregisterInputSpec @@ -368,7 +368,7 @@ class NormalizeOutputSpec(TraitedSpec): class Normalize(SPMCommand): """use spm_normalise for warping an image to a template - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=51 Examples @@ -377,7 +377,7 @@ class Normalize(SPMCommand): >>> norm = spm.Normalize() >>> norm.inputs.source = 'functional.nii' >>> norm.run() # doctest: +SKIP - + """ input_spec = NormalizeInputSpec @@ -449,34 +449,34 @@ class SegmentInputSpec(SPMCommandInputSpec): data = InputMultiPath(File(exists=True), field='data', desc='one scan per subject', copyfile=False, mandatory=True) gm_output_type = traits.List(traits.Bool(), minlen=3, maxlen=3, field='output.GM', - desc="""Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. - None: [False,False,False], - Native Space: [False,False,True], - Unmodulated Normalised: [False,True,False], - Modulated Normalised: [True,False,False], - Native + Unmodulated Normalised: [False,True,True], - Native + Modulated Normalised: [True,False,True], - Native + Modulated + Unmodulated: [True,True,True], + desc="""Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. + None: [False,False,False], + Native Space: [False,False,True], + Unmodulated Normalised: [False,True,False], + Modulated Normalised: [True,False,False], + Native + Unmodulated Normalised: [False,True,True], + Native + Modulated Normalised: [True,False,True], + Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""") wm_output_type = traits.List(traits.Bool(), minlen=3, maxlen=3, field='output.WM', - desc="""Options to produce white matter images: c2*.img, wc2*.img and mwc2*.img. - None: [False,False,False], - Native Space: [False,False,True], - Unmodulated Normalised: [False,True,False], - Modulated Normalised: [True,False,False], - Native + Unmodulated Normalised: [False,True,True], - Native + Modulated Normalised: [True,False,True], - Native + Modulated + Unmodulated: [True,True,True], + desc="""Options to produce white matter images: c2*.img, wc2*.img and mwc2*.img. 
+ None: [False,False,False], + Native Space: [False,False,True], + Unmodulated Normalised: [False,True,False], + Modulated Normalised: [True,False,False], + Native + Unmodulated Normalised: [False,True,True], + Native + Modulated Normalised: [True,False,True], + Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""") csf_output_type = traits.List(traits.Bool(), minlen=3, maxlen=3, field='output.CSF', - desc="""Options to produce CSF images: c3*.img, wc3*.img and mwc3*.img. - None: [False,False,False], - Native Space: [False,False,True], - Unmodulated Normalised: [False,True,False], - Modulated Normalised: [True,False,False], - Native + Unmodulated Normalised: [False,True,True], - Native + Modulated Normalised: [True,False,True], - Native + Modulated + Unmodulated: [True,True,True], + desc="""Options to produce CSF images: c3*.img, wc3*.img and mwc3*.img. + None: [False,False,False], + Native Space: [False,False,True], + Unmodulated Normalised: [False,True,False], + Modulated Normalised: [True,False,False], + Native + Unmodulated Normalised: [False,True,True], + Native + Modulated Normalised: [True,False,True], + Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""") save_bias_corrected = traits.Bool(field='output.biascor', desc='True/False produce a bias corrected image') @@ -518,7 +518,7 @@ class SegmentOutputSpec(TraitedSpec): class Segment(SPMCommand): """use spm_segment to separate structural images into different tissue classes. - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=43 Examples @@ -527,7 +527,7 @@ class Segment(SPMCommand): >>> seg = spm.Segment() >>> seg.inputs.data = 'structural.nii' >>> seg.run() # doctest: +SKIP - + """ _jobtype = 'spatial' @@ -585,7 +585,7 @@ class NewSegmentInputSpec(SPMCommandInputSpec): desc="""A tuple with the following fields: - bias reguralisation (0-10) - FWHM of Gaussian smoothness of bias - - which maps to save (Corrected, Field) - a tuple of two boolean values""", + - which maps to save (Corrected, Field) - a tuple of two boolean values""", field='channel') tissues = traits.List(traits.Tuple(traits.Tuple(File(exists=True), traits.Int()), traits.Int(), traits.Tuple(traits.Bool, traits.Bool), traits.Tuple(traits.Bool, traits.Bool)), @@ -593,7 +593,7 @@ class NewSegmentInputSpec(SPMCommandInputSpec): - tissue probability map (4D), 1-based index to frame - number of gaussians - which maps to save [Native, DARTEL] - a tuple of two boolean values - - which maps to save [Modulated, Unmodualted] - a tuple of two boolean values""", + - which maps to save [Modulated, Unmodualted] - a tuple of two boolean values""", field='tissue') affine_regularization = traits.Enum('mni', 'eastern', 'subj', 'none', field='warp.affreg', desc='mni, eastern, subj, none ') @@ -618,7 +618,7 @@ class NewSegment(SPMCommand): tissue classes. Supports multiple modalities. NOTE: This interface currently supports single channel input only - + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=185 Examples @@ -727,7 +727,7 @@ class SmoothOutputSpec(TraitedSpec): class Smooth(SPMCommand): """Use spm_smooth for 3D Gaussian smoothing of image volumes. 
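The four-part NewSegment tissue tuples documented above are easier to read spelled out. A hedged sketch (the TPM file name, frame indices and gaussian counts are illustrative):

>>> from nipype.interfaces import spm
>>> tpm = 'TPM.nii'  # assumed tissue probability map; frames are 1-based
>>> gm  = ((tpm, 1), 2, (True, False), (False, False))  # native GM only
>>> wm  = ((tpm, 2), 2, (True, False), (False, False))  # native WM only
>>> csf = ((tpm, 3), 2, (True, False), (False, False))  # native CSF only
>>> seg = spm.NewSegment()              # doctest: +SKIP
>>> seg.inputs.tissues = [gm, wm, csf]  # doctest: +SKIP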
- + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=57 Examples @@ -757,7 +757,7 @@ def _format_arg(self, opt, spec, val): return val if opt == 'implicit_masking': return int(val) - + return val def _list_outputs(self): diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index dbbab59eab..61c36dab90 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -6,7 +6,7 @@ import numpy as np -from nipype.testing import (assert_equal, assert_false, assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, skipif) import nibabel as nb import nipype.interfaces.spm.base as spm @@ -34,7 +34,7 @@ def create_files_in_directory(): nb.save(nb.Nifti1Image(img,np.eye(4),hdr), os.path.join(outdir,f)) return filelist, outdir, cwd - + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) @@ -85,7 +85,7 @@ class TestClass(spm.SPMCommand): dc = TestClass() # dc = derived_class out = dc._reformat_dict_for_savemat({'a':{'b':{'c':[]}}}) yield assert_equal, out, [{'a': [{'b': [{'c': []}]}]}] - + def test_generate_job(): class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec @@ -112,7 +112,7 @@ class TestClass(spm.SPMCommand): contents['onsets'][0] = [1,2,3,4] out = dc._generate_job(prefix='test',contents=contents) yield assert_equal, out, 'test.onsets = {...\n[1, 2, 3, 4];...\n};\n' - + def test_make_matlab_command(): class TestClass(spm.SPMCommand): _jobtype = 'jobtype' diff --git a/nipype/interfaces/spm/tests/test_model.py b/nipype/interfaces/spm/tests/test_model.py index 7694f86898..e88148932c 100644 --- a/nipype/interfaces/spm/tests/test_model.py +++ b/nipype/interfaces/spm/tests/test_model.py @@ -6,7 +6,7 @@ import numpy as np -from nipype.testing import (assert_equal, assert_false, assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, skipif) import nibabel as nb import nipype.interfaces.spm.model as spm @@ -34,7 +34,7 @@ def create_files_in_directory(): nb.save(nb.Nifti1Image(img,np.eye(4),hdr), os.path.join(outdir,f)) return filelist, outdir, cwd - + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) @@ -141,7 +141,7 @@ def test_onesamplettestdesign(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value - + def test_twosamplettestdesign(): yield assert_equal, spm.TwoSampleTTestDesign._jobtype, 'stats' yield assert_equal, spm.TwoSampleTTestDesign._jobname, 'factorial_design' diff --git a/nipype/interfaces/spm/tests/test_preprocess.py b/nipype/interfaces/spm/tests/test_preprocess.py index f13deb7d49..8cd87463ff 100644 --- a/nipype/interfaces/spm/tests/test_preprocess.py +++ b/nipype/interfaces/spm/tests/test_preprocess.py @@ -6,7 +6,7 @@ import numpy as np -from nipype.testing import (assert_equal, assert_false, assert_true, +from nipype.testing import (assert_equal, assert_false, assert_true, assert_raises, skipif) import nibabel as nb import nipype.interfaces.spm as spm @@ -34,7 +34,7 @@ def create_files_in_directory(): nb.save(nb.Nifti1Image(img,np.eye(4),hdr), os.path.join(outdir,f)) return filelist, outdir, cwd - + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) @@ -152,7 +152,7 @@ def test_normalize(): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, 
getattr(norm.inputs.traits()[key], metakey), value - + def test_normalize_list_outputs(): filelist, outdir, cwd = create_files_in_directory() norm = spm.Normalize(source=filelist[0]) diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index 203fe6d973..bc0ccb0eb7 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -10,10 +10,10 @@ class Analyze2niiOutputSpec(SPMCommandInputSpec): nifti_file = File(exists=True) class Analyze2nii(SPMCommand): - + input_spec = Analyze2niiInputSpec output_spec = Analyze2niiOutputSpec - + def _make_matlab_command(self, _): script = "V = spm_vol('%s');\n"%self.inputs.analyze_file _, name,_ = split_filename(self.inputs.analyze_file) @@ -21,9 +21,9 @@ def _make_matlab_command(self, _): script += "[Y, XYZ] = spm_read_vols(V);\n" script += "V.fname = '%s';\n"%self.output_name script += "spm_write_vol(V, Y);\n" - + return script - + def _list_outputs(self): outputs = self._outputs().get() outputs['nifti_file'] = self.output_name diff --git a/nipype/interfaces/tests/test_base.py b/nipype/interfaces/tests/test_base.py index 5810c47c37..d06587d331 100644 --- a/nipype/interfaces/tests/test_base.py +++ b/nipype/interfaces/tests/test_base.py @@ -76,7 +76,7 @@ def teardown_file(tmp_dir): def test_TraitedSpec(): yield assert_true, nib.TraitedSpec().get_hashval() yield assert_equal, nib.TraitedSpec().__repr__(), '\n\n' - + class spec(nib.TraitedSpec): foo = nib.traits.Int goo = nib.traits.Float(usedefault=True) @@ -95,7 +95,7 @@ class spec(nib.TraitedSpec): def test_TraitedSpec_logic(): class spec3(nib.TraitedSpec): _xor_inputs = ('foo', 'bar') - + foo = nib.traits.Int(xor = _xor_inputs, desc = 'foo or bar, not both') bar = nib.traits.Int(xor = _xor_inputs, @@ -118,7 +118,7 @@ class MyInterface(nib.BaseInterface): yield assert_equal, myif.inputs.foo, 1 myif.inputs.kung = 2 yield assert_equal, myif.inputs.kung, 2.0 - + def checknose(): """check version of nose for known incompatability""" @@ -169,7 +169,7 @@ def test_Interface(): class DerivedInterface(nib.Interface): def __init__(self): pass - + nif = DerivedInterface() yield assert_raises, NotImplementedError, nif.run yield assert_raises, NotImplementedError, nif.aggregate_outputs @@ -179,7 +179,7 @@ def __init__(self): def test_BaseInterface(): yield assert_equal, nib.BaseInterface.help(), None yield assert_equal, nib.BaseInterface._get_filecopy_info(), [] - + class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int') @@ -191,7 +191,7 @@ class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int') class DerivedInterface(nib.BaseInterface): input_spec = InputSpec - + yield assert_equal, DerivedInterface.help(), None yield assert_equal, DerivedInterface()._outputs(), None yield assert_equal, DerivedInterface._get_filecopy_info()[0]['key'], 'woo' @@ -203,7 +203,7 @@ class DerivedInterface(nib.BaseInterface): yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None yield assert_raises, ValueError, DerivedInterface().run yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run - + class DerivedInterface2(DerivedInterface): output_spec = OutputSpec def _run_interface(self, runtime): @@ -249,7 +249,7 @@ class CommandLineInputSpec1(nib.CommandLineInputSpec): yield assert_equal, cmd[0], '-g' yield assert_equal, cmd[-1], '-i 1 -i 2 -i 3' yield assert_true, 'hello' not in ' '.join(cmd) - + class CommandLineInputSpec2(nib.CommandLineInputSpec): foo = nib.File(argstr='%s', desc='a str', genfile=True) 
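# A trait flagged genfile=True is filled in at runtime by the interface's
# _gen_filename() hook whenever the user leaves it undefined; DerivedClass
# below returns the literal string 'filename' from that hook, which is why
# ci6._parse_inputs()[0] is expected to equal 'filename'.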
nib.CommandLine.input_spec = CommandLineInputSpec2 @@ -260,7 +260,7 @@ class DerivedClass(nib.CommandLine): input_spec = CommandLineInputSpec2 def _gen_filename(self, name): return 'filename' - + ci6 = DerivedClass(command='cmd') yield assert_equal, ci6._parse_inputs()[0], 'filename' nib.CommandLine.input_spec = nib.CommandLineInputSpec diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index 02b47455df..5a7a8bfe9a 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -7,13 +7,13 @@ from nipype.testing import assert_equal, assert_true, assert_false import nipype.interfaces.io as nio -from nipype.interfaces.base import Undefined +from nipype.interfaces.base import Undefined def test_datagrabber(): dg = nio.DataGrabber() yield assert_equal, dg.inputs.template, Undefined yield assert_equal, dg.inputs.base_directory, Undefined - yield assert_equal, dg.inputs.template_args,{'outfiles': []} + yield assert_equal, dg.inputs.template_args,{'outfiles': []} def test_datasink(): ds = nio.DataSink() diff --git a/nipype/interfaces/tests/test_matlab.py b/nipype/interfaces/tests/test_matlab.py index 2cef94677e..b1119d5d81 100644 --- a/nipype/interfaces/tests/test_matlab.py +++ b/nipype/interfaces/tests/test_matlab.py @@ -4,7 +4,7 @@ from tempfile import mkdtemp from shutil import rmtree -from nipype.testing import (assert_equal, assert_true, assert_false, +from nipype.testing import (assert_equal, assert_true, assert_false, assert_raises, skipif) import nipype.interfaces.matlab as mlab from nipype.interfaces.base import CommandLine, Bunch @@ -25,7 +25,7 @@ if matlab_path != '': no_matlab = False mlab.MatlabCommand.set_default_matlab_cmd(matlab_cmd) - + # If a test requires matlab, prefix it with the skipif decorator like # below. 
Must import skipif from nipype.testing # @@ -38,10 +38,10 @@ def test_cmdline(): basedir = mkdtemp() mi = mlab.MatlabCommand(script='whos', script_file='testscript', mfile=False) - + yield assert_equal, mi.cmdline, \ matlab_cmd + ' -nodesktop -nosplash -singleCompThread -r "fprintf(1,\'Executing code at %s:\\n\',datestr(now));ver,try,whos,catch ME,fprintf(2,\'MATLAB code threw an exception:\\n\');fprintf(2,\'%s\\n\',ME.message);if length(ME.stack) ~= 0, fprintf(2,\'File:%s\\nName:%s\\nLine:%d\\n\',ME.stack.file,ME.stack.name,ME.stack.line);, end;end;;exit"' - + yield assert_equal, mi.inputs.script, 'whos' yield assert_equal, mi.inputs.script_file, 'testscript' path_exists = os.path.exists(os.path.join(basedir,'testscript.m')) @@ -84,11 +84,11 @@ def test_run_interface(): yield assert_equal, res.runtime.returncode, 0 os.chdir(cwd) rmtree(basedir) - + @skipif(no_matlab) def test_set_matlabcmd(): mi = mlab.MatlabCommand() mi.set_default_matlab_cmd('foo') yield assert_equal, mi._default_matlab_cmd, 'foo' mi.set_default_matlab_cmd(matlab_cmd) - + diff --git a/nipype/interfaces/tests/test_utility.py b/nipype/interfaces/tests/test_utility.py index 4826c8b14d..5f3c95d963 100644 --- a/nipype/interfaces/tests/test_utility.py +++ b/nipype/interfaces/tests/test_utility.py @@ -5,13 +5,13 @@ from tempfile import mkdtemp from nipype.testing import assert_equal, assert_true -from nipype.interfaces import utility +from nipype.interfaces import utility def test_rename(): tempdir = os.path.realpath(mkdtemp()) origdir = os.getcwd() os.chdir(tempdir) - + # Test very simple rename _ = open("file.txt","w").close() rn = utility.Rename(in_file="file.txt", format_string="test_file1.txt") @@ -30,7 +30,7 @@ def test_rename(): rn.inputs.field2 = 2 res = rn.run() outfile = os.path.join(tempdir, "test_file2.txt") - yield assert_equal, res.outputs.out_file, outfile + yield assert_equal, res.outputs.out_file, outfile yield assert_true, os.path.exists(outfile) # Clean up diff --git a/nipype/interfaces/traits_extension.py b/nipype/interfaces/traits_extension.py index b74bc88bd4..8770585270 100644 --- a/nipype/interfaces/traits_extension.py +++ b/nipype/interfaces/traits_extension.py @@ -60,7 +60,7 @@ def __init__ ( self, value = '', filter = None, auto_set = False, self.auto_set = auto_set self.entries = entries self.exists = exists - + if exists: self.info_text = 'an existing file name' @@ -147,7 +147,7 @@ def __init__ ( self, value = '', auto_set = False, entries = 0, self.entries = entries self.auto_set = auto_set self.exists = exists - + if exists: self.info_text = 'an existing directory name' @@ -244,8 +244,8 @@ def has_metadata(trait, metadata, value, recursive=True): if hasattr(trait, 'handlers') and trait.handlers != None: for handler in trait.handlers: count += has_metadata(handler, metadata, recursive) - + return count > 0 - + diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 1270e06285..b46bacadfc 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -12,13 +12,13 @@ from nipype.interfaces.io import IOBase, add_traits from nipype.testing import assert_equal from nipype.utils.misc import getsource, create_function_from_source, dumps - + class IdentityInterface(IOBase): """Basic interface class generates identity mappings Examples -------- - + >>> from nipype.interfaces.utility import IdentityInterface >>> ii = IdentityInterface(fields=['a','b'], mandatory_inputs=False) >>> ii.inputs.a @@ -32,7 +32,7 @@ class IdentityInterface(IOBase): >>> out = ii.run() >>> 
out.outputs.a 'foo' - + >>> ii2 = IdentityInterface(fields=['a','b'], mandatory_inputs=True) >>> ii2.inputs.a = 'foo' >>> out = ii2.run() # doctest: +SKIP @@ -40,7 +40,7 @@ class IdentityInterface(IOBase): """ input_spec = DynamicTraitedSpec output_spec = DynamicTraitedSpec - + def __init__(self, fields=None, mandatory_inputs = True, **inputs): super(IdentityInterface, self).__init__(**inputs) if fields is None or not fields: @@ -67,7 +67,7 @@ def _list_outputs(self): You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." % \ (self.__class__.__name__, key) raise ValueError(msg) - + outputs = self._outputs().get() for key in self._fields: val = getattr(self.inputs, key) @@ -86,7 +86,7 @@ class Merge(IOBase): Examples -------- - + >>> from nipype.interfaces.utility import Merge >>> mi = Merge(3) >>> mi.inputs.in1 = 1 @@ -95,16 +95,16 @@ class Merge(IOBase): >>> out = mi.run() >>> out.outputs.out [1, 2, 5, 3] - + """ input_spec = MergeInputSpec output_spec = MergeOutputSpec - + def __init__(self, numinputs=0, **inputs): super(Merge, self).__init__(**inputs) self.numinputs = numinputs add_traits(self.inputs, ['in%d'%(i+1) for i in range(numinputs)]) - + def _list_outputs(self): outputs = self._outputs().get() out = [] @@ -130,7 +130,7 @@ class RenameInputSpec(DynamicTraitedSpec): in_file = File(exists=True, mandatory=True, desc="file to rename") keep_ext = traits.Bool(desc="Keep in_file extension, replace non-extension component of name") - format_string = traits.String(mandatory=True, + format_string = traits.String(mandatory=True, desc="Python formatting string for output template") parse_string = traits.String(desc="Python regexp parse string to define replacement inputs") @@ -141,10 +141,10 @@ class RenameOutputSpec(TraitedSpec): class Rename(IOBase): """Change the name of a file based on a mapped format string. - To use additional inputs that will be defined at run-time, the class - constructor must be called with the format template, and the fields - identified will become inputs to the interface. - + To use additional inputs that will be defined at run-time, the class + constructor must be called with the format template, and the fields + identified will become inputs to the interface. + Additionally, you may set the parse_string input, which will be run over the input filename with a regular expressions search, and will fill in additional input fields from matched groups. 
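A hedged sketch of that parse_string mechanism (assuming the expression's named groups are what fill the matching format fields; the file names are invented):

>>> from nipype.interfaces.utility import Rename
>>> rn = Rename(format_string="%(subject)s_run%(run)s.nii")
>>> rn.inputs.parse_string = r"(?P<subject>\w+)_r(?P<run>\d+)\.nii"
>>> rn.inputs.in_file = "sub01_r02.nii"  # doctest: +SKIP
>>> rn.run().outputs.out_file            # doctest: +SKIP
'.../sub01_run02.nii'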
Fields set with @@ -188,7 +188,7 @@ def __init__(self, format_string=None, **inputs): self.inputs.format_string = format_string self.fmt_fields = re.findall(r"%\((.+?)\)", format_string) add_traits(self.inputs, self.fmt_fields) - else: + else: self.fmt_fields = [] def _rename(self): @@ -211,7 +211,7 @@ def _run_interface(self, runtime): runtime.returncode = 0 _ = copyfile(self.inputs.in_file, os.path.join(os.getcwd(), self._rename())) return runtime - + def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.join(os.getcwd(), self._rename()) @@ -228,19 +228,19 @@ class Split(IOBase): Examples -------- - + >>> from nipype.interfaces.utility import Split >>> sp = Split() >>> _ = sp.inputs.set(inlist=[1,2,3],splits=[2,1]) >>> out = sp.run() >>> out.outputs.out1 [1, 2] - + """ input_spec = SplitInputSpec output_spec = DynamicTraitedSpec - + def _add_output_traits(self, base): undefined_traits = {} for i in range(len(self.inputs.splits)): @@ -249,7 +249,7 @@ def _add_output_traits(self, base): undefined_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_traits) return base - + def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.splits): @@ -267,34 +267,34 @@ class SelectInputSpec(BaseInterfaceInputSpec): desc='list of values to choose from') index = InputMultiPath(traits.Int, mandatory=True, desc='0-based indices of values to choose') - + class SelectOutputSpec(TraitedSpec): out = OutputMultiPath(traits.Any, desc='list of selected values') - + class Select(IOBase): """Basic interface class to select specific elements from a list Examples -------- - + >>> from nipype.interfaces.utility import Select >>> sl = Select() >>> _ = sl.inputs.set(inlist=[1,2,3,4,5],index=[3]) >>> out = sl.run() >>> out.outputs.out 4 - + >>> _ = sl.inputs.set(inlist=[1,2,3,4,5],index=[3,4]) >>> out = sl.run() >>> out.outputs.out [4, 5] - + """ input_spec = SelectInputSpec output_spec = SelectOutputSpec - + def _list_outputs(self): outputs = self._outputs().get() out = np.array(self.inputs.inlist)[np.array(self.inputs.index)].tolist() @@ -318,7 +318,7 @@ class Function(IOBase): 6 """ - + input_spec = FunctionInputSpec output_spec = DynamicTraitedSpec @@ -333,7 +333,7 @@ def __init__(self, input_names, output_names, function=None, **inputs): output_names: single str or list names corresponding to function outputs. 
has to match the number of outputs """ - + super(Function, self).__init__(**inputs) if function: if hasattr(function, '__call__'): @@ -370,7 +370,7 @@ def _add_output_traits(self, base): base.trait_set(trait_change_notify=False, **undefined_traits) return base - def _run_interface(self, runtime): + def _run_interface(self, runtime): function_handle = create_function_from_source(self.inputs.function_str) args = {} @@ -380,13 +380,13 @@ def _run_interface(self, runtime): args[name] = value out = function_handle(**args) - + if len(self._output_names) == 1: self._out[self._output_names[0]] = out else: if isinstance(out, tuple) and (len(out) != len(self._output_names)): raise RuntimeError('Mismatch in number of expected outputs') - + else: for idx, name in enumerate(self._output_names): self._out[name] = out[idx] @@ -403,15 +403,15 @@ def _list_outputs(self): class AssertEqualInputSpec(BaseInterfaceInputSpec): volume1 = File(exists=True, mandatory=True) volume2 = File(exists=True, mandatory=True) - + class AssertEqual(BaseInterface): input_spec = AssertEqualInputSpec - + def _run_interface(self, runtime): - + data1 = nb.load(self.inputs.volume1).get_data() data2 = nb.load(self.inputs.volume2).get_data() - + assert_equal(data1, data2) - + return runtime diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index be018bd056..546e580ea2 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -95,7 +95,7 @@ def fullname(self): fullname = self.name if self._hierarchy: fullname = self._hierarchy + '.' + self.name - return fullname + return fullname def clone(self, name): """Clone a workflowbase object @@ -149,14 +149,14 @@ def clone(self, name): .. note:: Will reset attributes used for executing workflow. See - _init_runtime_fields. + _init_runtime_fields. Parameters ---------- name: string (mandatory ) every clone requires a new name - + """ clone = super(Workflow, self).clone(name) clone._reset_hierarchy() @@ -175,7 +175,7 @@ def connect(self, *args, **kwargs): Parameters ---------- - + args : list or a set of four positional arguments Four positional arguments of the form:: @@ -233,7 +233,7 @@ def connect(self, *args, **kwargs): for srcnode, destnode, connects in connection_list: connected_ports = [] # check to see which ports of destnode are already - # connected. + # connected. if not disconnect and (destnode in self._graph.nodes()): for edge in self._graph.in_edges_iter(destnode): data = self._graph.get_edge_data(*edge) @@ -378,15 +378,15 @@ def write_graph(self, dotfilename='graph.dot', graph2use='hierarchical', format= Parameters ---------- - + graph2use: 'orig', 'hierarchical' (default), 'flat', 'exec' orig - creates a top level graph without expanding internal workflow nodes flat - expands workflow nodes recursively exec - expands workflows to depict iterables - + format: 'png', 'svg' - + """ graphtypes = ['orig', 'flat', 'hierarchical', 'exec'] if graph2use not in graphtypes: @@ -431,7 +431,7 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): Parameters ---------- - + plugin: plugin name or object Plugin to use for execution. You can create your own plugins for execution. @@ -516,7 +516,7 @@ def _write_report_info(self, workingdir, name, graph): } function load(name, div) { - readfile(name, div); + readfile(name, div); return false; } function loadimg(name, div) { @@ -598,7 +598,7 @@ def _write_report_info(self, workingdir, name, graph): fp.writelines('
content
') fp.writelines('') fp.close() - + def _set_needed_outputs(self, graph): """Initialize node with list of which outputs are needed """ @@ -615,7 +615,7 @@ def _set_needed_outputs(self, graph): input_name = sourceinfo if input_name not in node.needed_outputs: node.needed_outputs += [input_name] - + def _configure_exec_nodes(self, graph): """Ensure that each node knows where to get inputs from """ @@ -630,7 +630,7 @@ def _configure_exec_nodes(self, graph): def _check_nodes(self, nodes): """Checks if any of the nodes are already in the graph - + """ node_names = [node.name for node in self._graph.nodes()] node_lineage = [node._hierarchy for node in self._graph.nodes()] @@ -876,7 +876,7 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=True): subnodename = subnodefullname.replace('.','_') for _ in self._graph.get_edge_data(node, subnode)['connect']: dotlist.append('%s -> %s;'%(nodename, subnodename)) - logger.debug('connection: ' + dotlist[-1]) + logger.debug('connection: ' + dotlist[-1]) # add between workflow connections for u,v,d in self._graph.edges_iter(data=True): uname = '.'.join(hierarchy + [u.fullname]) @@ -899,7 +899,7 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=True): if uname1.split('.')[:-1] != vname1.split('.')[:-1]: dotlist.append('%s -> %s;'%(uname1.replace('.','_'), vname1.replace('.','_'))) - logger.debug('cross connection: ' + dotlist[-1]) + logger.debug('cross connection: ' + dotlist[-1]) return ('\n'+prefix).join(dotlist) @@ -909,7 +909,7 @@ class Node(WorkflowBase): Parameters ---------- - + interface : interface object node specific interface (fsl.Bet(), spm.Coregister()) iterables : generator @@ -922,14 +922,14 @@ class Node(WorkflowBase): Notes ----- - + creates output directory copies/discovers files to work with saves a hash.json file to indicate that a process has been completed Examples -------- - + >>> import nipype.interfaces.spm as spm >>> realign = Node(interface=spm.Realign(), name='realign') >>> realign.inputs.in_files = 'functional.nii' @@ -977,7 +977,7 @@ def output_dir(self): outputdir = os.path.join(outputdir, *self.parameterization) return os.path.abspath(os.path.join(outputdir, self.name)) - + def set_input(self, parameter, val): """ Set interface input value or nodewrapper attribute @@ -1091,7 +1091,7 @@ def run(self, updatehash=False, force_execute=False): self._save_hashfile(hashfile, hashed_inputs) if force_execute or (not updatehash and (self.overwrite or not os.path.exists(hashfile))): logger.debug("Node hash: %s"%hashvalue) - + #by rerunning we mean only nodes that did finish to run previously if os.path.exists(outdir) \ and not isinstance(self, MapNode) \ @@ -1116,7 +1116,7 @@ def run(self, updatehash=False, force_execute=False): pass else: logdebug_dict_differences(prev_inputs, hashed_inputs) - if str2bool(self.config['execution']['stop_on_first_rerun']): + if str2bool(self.config['execution']['stop_on_first_rerun']): raise Exception("Cannot rerun when 'stop_on_first_rerun' is set to True") hashfile_unfinished = os.path.join(outdir, '_0x%s_unfinished.json' % hashvalue) if os.path.exists(hashfile): @@ -1249,7 +1249,7 @@ def _run_command(self, execute, copyfiles=True): except Exception, msg: self._result.runtime.stderr = msg raise - + if str2bool(self.config['execution']['remove_unnecessary_outputs']): dirs2keep = None if isinstance(self, MapNode): @@ -1277,7 +1277,7 @@ def _strip_temp(self, files, wd): else: out.append(f.replace(os.path.join(wd,'_tempinput'),wd)) return out - + def _copyfiles_to_wd(self, outdir, execute, 
linksonly=False): """ copy files over and change the inputs""" @@ -1348,7 +1348,7 @@ def write_report(self, report_type=None, cwd=None): fp.writelines(write_rst_dict({'hostname' : self.result.runtime.hostname, 'duration' : self.result.runtime.duration, 'command' : self.result.runtime.cmdline})) - else: + else: fp.writelines(write_rst_dict({'hostname' : self.result.runtime.hostname, 'duration' : self.result.runtime.duration})) if hasattr(self.result.runtime, 'merged'): @@ -1370,7 +1370,7 @@ class MapNode(Node): >>> realign = MapNode(interface=fsl.MCFLIRT(), name='realign', iterfield=['in_file']) # doctest: +SKIP >>> realign.inputs.in_file = ['functional.nii', 'functional2.nii', 'functional3.nii'] # doctest: +SKIP >>> realign.run() # doctest: +SKIP - + """ def __init__(self, interface, iterfield=None, **kwargs): @@ -1542,20 +1542,20 @@ def write_report(self, report_type=None, cwd=None): subnode_report_files.insert(i, 'subnode %d'%i + ' : ' + os.path.join(cwd, 'mapflow', nodename, '_report', 'report.rst')) fp.writelines(write_rst_list(subnode_report_files)) fp.close() - + def get_subnodes(self): if not self._got_inputs: self._get_inputs() self._got_inputs = True self.write_report(report_type='preexec', cwd = self.output_dir()) return [node for _, node in self._make_nodes()] - + def num_subnodes(self): if not self._got_inputs: self._get_inputs() self._got_inputs = True return len(filename_to_list(getattr(self.inputs, self.iterfield[0]))) - + def _get_inputs(self): old_inputs = self._inputs.get() self._inputs = self._create_dynamic_traits(self._interface.inputs, diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 99c5c1292e..a427da7312 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -93,7 +93,7 @@ def run(self, graph, config): class DistributedPluginBase(PluginBase): """Execute workflow with a distribution engine """ - + def __init__(self, plugin_args=None): """Initialize runtime attributes to none @@ -315,7 +315,7 @@ def _submit_batchtask(self, scriptfile): """Submit a task to the batch system """ raise NotImplementedError - + def _get_result(self, taskid): if taskid not in self._pending: raise Exception('Task %d not found'%taskid) diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 0b4feff91b..5abe021485 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -19,7 +19,7 @@ def run(self, graph, config, updatehash=False): ---------- graph : networkx digraph - defines order of execution + defines order of execution """ if not isinstance(graph, nx.DiGraph): diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 4a7b0fb14f..4393dfbee7 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -26,7 +26,7 @@ class MultiProcPlugin(DistributedPluginBase): execution. 
Currently supported options are: - n_procs : number of processes to use - + """ def __init__(self, plugin_args=None): diff --git a/nipype/pipeline/tests/test_engine.py b/nipype/pipeline/tests/test_engine.py index cb6d6cf6c2..768d4e7e31 100644 --- a/nipype/pipeline/tests/test_engine.py +++ b/nipype/pipeline/tests/test_engine.py @@ -26,11 +26,11 @@ class OutputSpec(nib.TraitedSpec): class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - + def _run_interface(self, runtime): runtime.returncode = 0 return runtime - + def _list_outputs(self): outputs = self._outputs().get() outputs['output1'] = [1, self.inputs.input1] @@ -109,7 +109,7 @@ def test2(): pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 4 yield assert_equal, len(pipe._execgraph.edges()), 0 - + def test3(): pipe = pe.Workflow(name='pipe') mod1 = pe.Node(interface=TestInterface(),name='mod1') @@ -121,7 +121,7 @@ def test3(): pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 3 yield assert_equal, len(pipe._execgraph.edges()), 2 - + def test4(): pipe = pe.Workflow(name='pipe') mod1 = pe.Node(interface=TestInterface(),name='mod1') @@ -346,4 +346,3 @@ def test_workflow_add(): yield assert_raises, IOError, w1.add_nodes, [n2] yield assert_raises, IOError, w1.add_nodes, [n3] yield assert_raises, IOError, w1.connect, [(w1,n2,[('n1.a','d')])] - \ No newline at end of file diff --git a/nipype/pipeline/utils.py b/nipype/pipeline/utils.py index 3cf1a635c1..ddba38986b 100644 --- a/nipype/pipeline/utils.py +++ b/nipype/pipeline/utils.py @@ -103,7 +103,7 @@ def get_print_name(node): For example, a node containing an instance of interfaces.fsl.BET would be called nodename.BET.fsl - + """ name = node.name if hasattr(node, '_interface'): @@ -114,7 +114,7 @@ def get_print_name(node): destclass = '.%s'%pkglist[2] name = '.'.join([node.name, interface]) + destclass return name - + def _create_dot_graph(graph, show_connectinfo=False): """Create a graph that can be pickled. @@ -283,7 +283,7 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables, prefix): ------- Returns a merged graph containing copies of the subgraph with appropriate edge connections to the supergraph. - + """ # Retrieve edge information connecting nodes of the subgraph to other # nodes of the supergraph. @@ -356,7 +356,7 @@ def _connect_nodes(graph, srcnode, destnode, connection_info): graph.add_edges_from([(srcnode, destnode, data)]) else: data['connect'].extend(connection_info) - + def _remove_identity_nodes(graph): """Remove identity nodes from an execution graph """ @@ -402,7 +402,7 @@ def _remove_identity_nodes(graph): else: srcnode, srcport = portinputs[key] if isinstance(srcport, tuple) and isinstance(src, tuple): - raise ValueError('Does not support two inline functions in series (\'%s\' and \'%s\'). Please use a Function node'%(srcport[1].split("\\n")[0][6:-1], + raise ValueError('Does not support two inline functions in series (\'%s\' and \'%s\'). Please use a Function node'%(srcport[1].split("\\n")[0][6:-1], src[1].split("\\n")[0][6:-1])) if isinstance(src, tuple): connect = {'connect': [((srcport, src[1], src[2]), @@ -416,11 +416,11 @@ def _remove_identity_nodes(graph): def generate_expanded_graph(graph_in): """Generates an expanded graph based on node parameterization - + Parameterization is controlled using the `iterables` field of the pipeline elements. 
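    For example, assuming two already-constructed nodes (the names and
    fields are illustrative):

    >>> node_a.iterables = ('a', [1, 2])  # doctest: +SKIP
    >>> node_b.iterables = ('b', [3, 4])  # doctest: +SKIP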
Thus if there are two nodes with iterables a=[1,2] and b=[3,4] this procedure will generate a graph with sub-graphs - parameterized as (a=1,b=3), (a=1,b=4), (a=2,b=3) and (a=2,b=4). + parameterized as (a=1,b=3), (a=1,b=4), (a=2,b=3) and (a=2,b=4). """ logger.debug("PE: expanding iterables") moreiterables = True @@ -464,21 +464,21 @@ def generate_expanded_graph(graph_in): def export_graph(graph_in, base_dir=None, show = False, use_execgraph=False, show_connectinfo=False, dotfilename='graph.dot', format='png'): """ Displays the graph layout of the pipeline - + This function requires that pygraphviz and matplotlib are available on the system. - + Parameters ---------- - + show : boolean Indicate whether to generate pygraphviz output fromn networkx. default [False] - + use_execgraph : boolean Indicates whether to use the specification graph or the execution graph. default [False] - + show_connectioninfo : boolean Indicates whether to show the edge data on the graph. This makes the graph rather cluttered. default [False] @@ -531,7 +531,7 @@ def make_output_dir(outdir): Parameters ---------- outdir : output directory to create - + """ if not os.path.exists(os.path.abspath(outdir)): logger.debug("Creating %s" % outdir) @@ -617,12 +617,12 @@ def clean_working_directory(outputs, cwd, inputs, needed_outputs, def merge_dict(d1, d2, merge=lambda x,y:y): """ - Merges two dictionaries, non-destructively, combining + Merges two dictionaries, non-destructively, combining values on duplicate keys as defined by the optional merge function. The default behavior replaces the values in d1 with corresponding values in d2. (There is no other generally - applicable merge strategy, but often you'll have homogeneous - types in your dicts, so specifying a merge technique can be + applicable merge strategy, but often you'll have homogeneous + types in your dicts, so specifying a merge technique can be valuable.) Examples: diff --git a/nipype/testing/__init__.py b/nipype/testing/__init__.py index 3ec279a349..cd86cb2b49 100644 --- a/nipype/testing/__init__.py +++ b/nipype/testing/__init__.py @@ -34,11 +34,11 @@ def example_data(infile='functional.nii'): """returns path to empty example data files for doc tests it will raise an exception if filename is not in the directory""" - + filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) outfile = os.path.join(basedir, 'data', infile) if not os.path.exists(outfile): raise IOError('%s empty data file does NOT exist'%(outfile)) - - return outfile + + return outfile diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index 5c7bbce9f2..875b1b5ee6 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -42,7 +42,7 @@ def grab_doc(cmd, trap_error=True): if trap_error and proc.returncode: msg = 'Attempting to run %s. Returned Error: %s'%(cmd,stderr) raise IOError(msg) - + if stderr: # A few programs, like fast and fnirt, send their help to # stderr instead of stdout. @@ -58,7 +58,7 @@ def reverse_opt_map(opt_map): opt_map : dict Dictionary mapping the attribute name to a command line flag. Each interface class defines these for the command it wraps. - + Returns ------- rev_opt_map : dict @@ -139,7 +139,7 @@ def insert_doc(doc, new_items): >>> from nipype.utils.docparse import insert_doc >>> doc = '''Parameters ... ---------- - ... outline : + ... outline : ... 
something about an outline''' >>> new_items = ['infile : str', ' The name of the input file'] @@ -152,7 +152,7 @@ def insert_doc(doc, new_items): The name of the input file outfile : str The name of the output file - outline : + outline : something about an outline """ @@ -192,7 +192,7 @@ def build_doc(doc, opts): formated to match nipy standards (as best we can). """ - + # Split doc into line elements. Generally, each line is an # individual flag/option. doclist = doc.split('\n') @@ -229,7 +229,7 @@ def build_doc(doc, opts): def get_doc(cmd, opt_map, help_flag=None, trap_error=True): """Get the docstring from our command and options map. - + Parameters ---------- cmd : string @@ -239,7 +239,7 @@ def get_doc(cmd, opt_map, help_flag=None, trap_error=True): help_flag : string Provide additional help flag. e.g., -h trap_error : boolean - Override if underlying command returns a non-zero returncode + Override if underlying command returns a non-zero returncode Returns ------- @@ -271,7 +271,7 @@ def _parse_doc(doc, style=['--']): ------- optmap : dict of input parameters """ - + # Split doc into line elements. Generally, each line is an # individual flag/option. doclist = doc.split('\n') @@ -301,18 +301,18 @@ def _parse_doc(doc, style=['--']): def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): """Auto-generate option map from command line help - + Parameters ---------- cmd : string The command whose documentation we are fetching style : string default ['--'] The help command style (--, -). Multiple styles can be provided in a - list e.g. ['--','-']. + list e.g. ['--','-']. help_flag : string Provide additional help flag. e.g., -h trap_error : boolean - Override if underlying command returns a non-zero returncode + Override if underlying command returns a non-zero returncode Returns ------- @@ -331,7 +331,7 @@ def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): def replace_opts(rep_doc, opts): """Replace flags with parameter names. - + This is a simple operation where we replace the command line flags with the attribute names. @@ -347,7 +347,7 @@ def replace_opts(rep_doc, opts): ------- rep_doc : string New docstring with flags replaces with attribute names. - + Examples -------- doc = grab_doc('bet') diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index 17df29b566..5b6928c423 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -417,5 +417,5 @@ def write_rst_dict(info, prefix=''): for key, value in sorted(info.items()): out.append(prefix + '* ' + key + ' : ' + str(value)) return '\n'.join(out)+'\n\n' - + diff --git a/nipype/utils/ipy_profile_nipype.py b/nipype/utils/ipy_profile_nipype.py index b9e34239b8..b61c6ecbfa 100644 --- a/nipype/utils/ipy_profile_nipype.py +++ b/nipype/utils/ipy_profile_nipype.py @@ -84,7 +84,7 @@ def nipype_mode_off(): def main(): """When we use the nipype profile we turn nipype_mode on.""" - + ip = ipapi.get() mode = get_nipype_mode() # initialize nipype_mode in user_ns diff --git a/nipype/utils/matlabtools.py b/nipype/utils/matlabtools.py index 91cb2e6973..7e5b4e0d35 100644 --- a/nipype/utils/matlabtools.py +++ b/nipype/utils/matlabtools.py @@ -21,7 +21,7 @@ def mlab_tempfile(dir=None): Parameters ---------- - + dir : str A path to use as the starting directory. 
Note that this directory must already exist, it is NOT created if it doesn't (in that case, OSError @@ -62,5 +62,5 @@ def mlab_tempfile(dir=None): f.close() else: raise ValueError("Could not make temp file after 100 tries") - + return f diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index c10a0bf459..616b48efa1 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -39,12 +39,12 @@ def find_indices(condition): def is_container(item): """Checks if item is a container (list, tuple, dict, set) - + Parameters ---------- - item : object + item : object object to check for .__iter__ - + Returns ------- output : Boolean @@ -55,10 +55,10 @@ def is_container(item): return True else: return False - + def container_to_string(cont): """Convert a container to a command line string. - + Elements of the container are joined with a space between them, suitable for a command line parameter. diff --git a/nipype/utils/onetime.py b/nipype/utils/onetime.py index 40dbe0e562..972d12c221 100644 --- a/nipype/utils/onetime.py +++ b/nipype/utils/onetime.py @@ -26,7 +26,7 @@ def __init__(self, func): Parameters ---------- func : method - + The method that will be called the first time to compute a value. Afterwards, the method's name will be a standard attribute holding the value of this computation. @@ -69,7 +69,7 @@ def setattr_on_read(func): ... @setattr_on_read ... def a(self): ... return 99 - ... + ... >>> x = MagicProp() >>> 'a' in x.__dict__ False diff --git a/nipype/utils/spm_docs.py b/nipype/utils/spm_docs.py index b7ff1ced68..2f81542425 100644 --- a/nipype/utils/spm_docs.py +++ b/nipype/utils/spm_docs.py @@ -8,7 +8,7 @@ def grab_doc(task_name): """Grab the SPM documentation for the given SPM task named `task_name` - + Parameters ---------- task_name : string @@ -20,7 +20,7 @@ def grab_doc(task_name): -------- spm_flat_config.m : This function can print out all the possible task names. - + """ cmd = matlab.MatlabCommandLine() diff --git a/nipype/utils/tests/test_filemanip.py b/nipype/utils/tests/test_filemanip.py index a0435da1ab..510df60cab 100644 --- a/nipype/utils/tests/test_filemanip.py +++ b/nipype/utils/tests/test_filemanip.py @@ -176,4 +176,4 @@ def test_loadflat(): os.unlink(name) yield assert_true, isinstance(aloaded, dict) yield assert_equal, sorted(aloaded.items()), sorted(adict.items()) - + diff --git a/nipype/workflows/freesurfer/utils.py b/nipype/workflows/freesurfer/utils.py index c5d088a6e8..9a62339ba4 100644 --- a/nipype/workflows/freesurfer/utils.py +++ b/nipype/workflows/freesurfer/utils.py @@ -109,7 +109,7 @@ def create_getmask_flow(name='getmask', dilate_mask=True): dilate : dilate the transformed file in source space threshold2 : binarize transformed file """ - + threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'), iterfield=['in_file'], name='threshold2') diff --git a/nipype/workflows/fsl/dti.py b/nipype/workflows/fsl/dti.py index 4e57d7ffc6..418ebddbda 100644 --- a/nipype/workflows/fsl/dti.py +++ b/nipype/workflows/fsl/dti.py @@ -14,10 +14,10 @@ def create_bedpostx_pipeline(name="bedpostx"): """Creates a pipeline that does the same as bedpostx script from FSL - calculates diffusion model parameters (distributions not MLE) voxelwise for the whole volume (by splitting it slicewise). 
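The slicewise strategy mentioned above reduces to a split/fit/re-merge pattern along z. A minimal sketch of the idea, using the same interfaces the pipeline wires up below (node names are illustrative)::

    import nipype.pipeline.engine as pe
    import nipype.interfaces.fsl as fsl

    # split the masked 4D volume along z so every slice is fit independently
    slice_dwi = pe.Node(fsl.Split(dimension='z'), name='slice_dwi')
    # one diffusion model fit per slice, run as a MapNode
    xfibres = pe.MapNode(fsl.XFibres(), iterfield=['dwi', 'mask'],
                         name='xfibres')
    # the per-slice samples are then stacked back into whole-volume images
    merge = pe.MapNode(fsl.Merge(dimension='z'), iterfield=['in_files'],
                       name='merge')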
- + Example ------- - + >>> nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx") >>> nipype_bedpostx.inputs.inputnode.dwi = 'diffusion.nii' >>> nipype_bedpostx.inputs.inputnode.mask = 'mask.nii' @@ -29,14 +29,14 @@ def create_bedpostx_pipeline(name="bedpostx"): >>> nipype_bedpostx.inputs.xfibres.n_jumps = 1250 >>> nipype_bedpostx.inputs.xfibres.sample_every = 25 >>> nipype_bedpostx.run() # doctest: +SKIP - + Inputs:: - + inputnode.dwi inputnode.mask - + Outputs:: - + outputnode.thsamples outputnode.phsamples outputnode.fsamples @@ -45,76 +45,76 @@ def create_bedpostx_pipeline(name="bedpostx"): outputnode.mean_fsamples outputnode.dyads outputnode.dyads_dispersion - + """ - - - inputnode = pe.Node(interface = util.IdentityInterface(fields=["dwi", "mask"]), + + + inputnode = pe.Node(interface = util.IdentityInterface(fields=["dwi", "mask"]), name="inputnode") - - mask_dwi = pe.Node(interface = fsl.ImageMaths(op_string = "-mas"), + + mask_dwi = pe.Node(interface = fsl.ImageMaths(op_string = "-mas"), name="mask_dwi") slice_dwi = pe.Node(interface = fsl.Split(dimension="z"), name="slice_dwi") - slice_mask = pe.Node(interface = fsl.Split(dimension="z"), + slice_mask = pe.Node(interface = fsl.Split(dimension="z"), name="slice_mask") - + preproc = pe.Workflow(name="preproc") - + preproc.connect([(inputnode, mask_dwi, [('dwi', 'in_file')]), (inputnode, mask_dwi, [('mask', 'in_file2')]), (mask_dwi, slice_dwi, [('out_file', 'in_file')]), (inputnode, slice_mask, [('mask', 'in_file')]) ]) - - xfibres = pe.MapNode(interface=fsl.XFibres(), name="xfibres", + + xfibres = pe.MapNode(interface=fsl.XFibres(), name="xfibres", iterfield=['dwi', 'mask']) - - + + # Normal set of parameters - xfibres.inputs.n_fibres = 2 - xfibres.inputs.fudge = 1 - xfibres.inputs.burn_in = 1000 - xfibres.inputs.n_jumps = 1250 + xfibres.inputs.n_fibres = 2 + xfibres.inputs.fudge = 1 + xfibres.inputs.burn_in = 1000 + xfibres.inputs.n_jumps = 1250 xfibres.inputs.sample_every = 25 xfibres.inputs.model = 1 xfibres.inputs.non_linear = True xfibres.inputs.update_proposal_every = 24 - - inputnode = pe.Node(interface = util.IdentityInterface(fields=["thsamples", - "phsamples", - "fsamples", - "dyads", + + inputnode = pe.Node(interface = util.IdentityInterface(fields=["thsamples", + "phsamples", + "fsamples", + "dyads", "mean_dsamples", - "mask"]), + "mask"]), name="inputnode") - - merge_thsamples = pe.MapNode(fsl.Merge(dimension="z"), + + merge_thsamples = pe.MapNode(fsl.Merge(dimension="z"), name="merge_thsamples", iterfield=['in_files']) - merge_phsamples = pe.MapNode(fsl.Merge(dimension="z"), + merge_phsamples = pe.MapNode(fsl.Merge(dimension="z"), name="merge_phsamples", iterfield=['in_files']) - merge_fsamples = pe.MapNode(fsl.Merge(dimension="z"), + merge_fsamples = pe.MapNode(fsl.Merge(dimension="z"), name="merge_fsamples", iterfield=['in_files']) - - - merge_mean_dsamples = pe.Node(fsl.Merge(dimension="z"), + + + merge_mean_dsamples = pe.Node(fsl.Merge(dimension="z"), name="merge_mean_dsamples") - - mean_thsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"), + + mean_thsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"), name="mean_thsamples", iterfield=['in_file']) - mean_phsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"), + mean_phsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"), name="mean_phsamples", iterfield=['in_file']) - mean_fsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"), + mean_fsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"), name="mean_fsamples", iterfield=['in_file']) - 
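    # The postproc connections below pass each samples list through a
    # transpose helper before merging. Its job is the regrouping sketched
    # here; this is an illustrative stand-in, the real helper is defined at
    # module level and is not part of this hunk.
    def transpose(samples_over_slices):
        # xfibres yields, for every slice, a list of per-fibre sample files;
        # merging along z instead needs, for every fibre, its per-slice files
        return map(list, zip(*samples_over_slices))

    # transpose([['sl0_f1', 'sl0_f2'], ['sl1_f1', 'sl1_f2']])
    # gives [['sl0_f1', 'sl1_f1'], ['sl0_f2', 'sl1_f2']]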
make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="make_dyads", + make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="make_dyads", iterfield=['theta_vol', 'phi_vol']) postproc = pe.Workflow(name="postproc") - + postproc.connect([(inputnode, merge_thsamples, [(('thsamples',transpose), 'in_files')]), (inputnode, merge_phsamples, [(('phsamples',transpose), 'in_files')]), (inputnode, merge_fsamples, [(('fsamples',transpose), 'in_files')]), (inputnode, merge_mean_dsamples, [('mean_dsamples', 'in_files')]), - + (merge_thsamples, mean_thsamples, [('merged_file', 'in_file')]), (merge_phsamples, mean_phsamples, [('merged_file', 'in_file')]), (merge_fsamples, mean_fsamples, [('merged_file', 'in_file')]), @@ -122,22 +122,22 @@ def create_bedpostx_pipeline(name="bedpostx"): (merge_phsamples, make_dyads, [('merged_file', 'phi_vol')]), (inputnode, make_dyads, [('mask', 'mask')]), ]) - - inputnode = pe.Node(interface = util.IdentityInterface(fields=["dwi", - "mask", - "bvecs", - "bvals"]), + + inputnode = pe.Node(interface = util.IdentityInterface(fields=["dwi", + "mask", + "bvecs", + "bvals"]), name="inputnode") - + bedpostx = pe.Workflow(name=name) bedpostx.connect([(inputnode, preproc, [('mask', 'inputnode.mask')]), (inputnode, preproc, [('dwi', 'inputnode.dwi')]), - + (preproc, xfibres, [('slice_dwi.out_files', 'dwi'), ('slice_mask.out_files', 'mask')]), (inputnode, xfibres, [('bvals', 'bvals')]), (inputnode, xfibres, [('bvecs', 'bvecs')]), - + (inputnode, postproc, [('mask', 'inputnode.mask')]), (xfibres, postproc, [('thsamples','inputnode.thsamples'), ('phsamples', 'inputnode.phsamples'), @@ -145,7 +145,7 @@ def create_bedpostx_pipeline(name="bedpostx"): ('dyads', 'inputnode.dyads'), ('mean_dsamples', 'inputnode.mean_dsamples')]), ]) - + outputnode = pe.Node(interface = util.IdentityInterface(fields=["thsamples", "phsamples", "fsamples", @@ -153,7 +153,7 @@ def create_bedpostx_pipeline(name="bedpostx"): "mean_phsamples", "mean_fsamples", "dyads", - "dyads_dispersion"]), + "dyads_dispersion"]), name="outputnode") bedpostx.connect([(postproc, outputnode, [("merge_thsamples.merged_file", "thsamples"), ("merge_phsamples.merged_file", "phsamples"), @@ -170,48 +170,48 @@ def create_eddy_correct_pipeline(name="eddy_correct"): """Creates a pipeline that replaces the eddy_correct script in FSL. It takes a series of diffusion weighted images and linearly coregisters them to one reference image.
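The reference volume is picked out of the split series with a util.Select node. In isolation that interface behaves roughly as below; a sketch, assuming Select's inlist/index semantics at this revision, with illustrative file names::

    import nipype.interfaces.utility as util

    select = util.Select()
    select.inputs.inlist = ['vol0000.nii', 'vol0001.nii', 'vol0002.nii']
    select.inputs.index = [0]      # inputnode.ref_num feeds this input
    res = select.run()             # res.outputs.out -> 'vol0000.nii'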
- + Example ------- - + >>> nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect") >>> nipype_eddycorrect.inputs.inputnode.in_file = 'diffusion.nii' >>> nipype_eddycorrect.inputs.inputnode.ref_num = 0 >>> nipype_eddycorrect.run() # doctest: +SKIP - + Inputs:: - + inputnode.in_file inputnode.ref_num - + Outputs:: - + outputnode.eddy_corrected """ - - inputnode = pe.Node(interface = util.IdentityInterface(fields=["in_file", "ref_num"]), + + inputnode = pe.Node(interface = util.IdentityInterface(fields=["in_file", "ref_num"]), name="inputnode") - + pipeline = pe.Workflow(name=name) - + split = pe.Node(fsl.Split(), name="split") pipeline.connect([(inputnode, split, [("in_file", "in_file")])]) - + pick_ref = pe.Node(util.Select(), name="pick_ref") pipeline.connect([(split, pick_ref, [("out_files", "inlist")]), (inputnode, pick_ref, [("ref_num", "index")])]) - + coregistration = pe.MapNode(fsl.FLIRT(no_search=True, padding_size=1), name = "coregistration", iterfield=["in_file"]) pipeline.connect([(split, coregistration, [("out_files", "in_file")]), (pick_ref, coregistration, [("out", "reference")])]) - + merge = pe.Node(fsl.Merge(dimension="t"), name="merge") pipeline.connect([(coregistration, merge, [("out_file", "in_files")]) ]) - - outputnode = pe.Node(interface = util.IdentityInterface(fields=["eddy_corrected"]), + + outputnode = pe.Node(interface = util.IdentityInterface(fields=["eddy_corrected"]), name="outputnode") - + pipeline.connect([(merge, outputnode, [("merged_file", "eddy_corrected")])]) - + return pipeline \ No newline at end of file diff --git a/nipype/workflows/fsl/preprocess.py b/nipype/workflows/fsl/preprocess.py index 435cfab8db..4414a2a085 100644 --- a/nipype/workflows/fsl/preprocess.py +++ b/nipype/workflows/fsl/preprocess.py @@ -383,7 +383,7 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') inputspec.highpass : HWHM in TRs (if created with highpass=True) Outputs:: - + outputspec.reference : volume to which runs are realigned outputspec.motion_parameters : motion correction parameters outputspec.realigned_files : motion corrected files @@ -679,7 +679,7 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') featpreproc.connect(highpass, ('out_file', pickfirst), meanfunc3, 'in_file') else: featpreproc.connect(meanscale, ('out_file', pickfirst), meanfunc3, 'in_file') - + featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean') @@ -700,7 +700,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): inputnode.in_files : functional runs (filename or list of filenames) inputnode.fwhm : fwhm for smoothing with SUSAN inputnode.mask_file : mask used for estimating SUSAN thresholds (but not for smoothing) - + Outputs:: outputnode.smoothed_files : functional runs (filename or list of filenames) @@ -716,9 +716,9 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): >>> smooth.run() # doctest: +SKIP """ - - susan_smooth = pe.Workflow(name=name) - + + susan_smooth = pe.Workflow(name=name) + """ Set up a node to define all inputs required for the preprocessing workflow @@ -728,7 +728,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): 'fwhm', 'mask_file']), name='inputnode') - + """ Smooth each run using SUSAN with the brightness threshold set to 75% of the median value for each run and a mask constituting the mean @@ -738,7 +738,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): smooth = pe.MapNode(interface=fsl.SUSAN(),
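    # SUSAN needs a brightness_threshold and a usans entry per run; both are
    # derived from the per-run median intensities and mean images computed
    # below, roughly like this (illustrative helpers, not the literal ones
    # used by the workflow):
    #   def getbtthresh(medianvals):
    #       return [0.75 * val for val in medianvals]
    #   def getusans(mean_and_median):
    #       return [[(mean, 0.75 * median)]
    #               for mean, median in mean_and_median]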
iterfield=['in_file', 'brightness_threshold','usans'], name='smooth') - + """ Determine the median value of the functional runs using the mask """ @@ -754,7 +754,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): name='median') susan_smooth.connect(inputnode, 'in_files', median, 'in_file') susan_smooth.connect(inputnode, 'mask_file', median, 'mask_file') - + """ Mask the motion corrected functional runs with the dilated mask """ @@ -771,7 +771,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): name='mask') susan_smooth.connect(inputnode, 'in_files', mask, 'in_file') susan_smooth.connect(inputnode, 'mask_file', mask, 'in_file2') - + """ Determine the mean image from each functional run """ @@ -781,7 +781,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): iterfield=['in_file'], name='meanfunc2') susan_smooth.connect(mask, 'out_file', meanfunc, 'in_file') - + """ Merge the median values with the mean functional images into a coupled list """ @@ -801,9 +801,9 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): outputnode = pe.Node(interface=util.IdentityInterface(fields=['smoothed_files']), name='outputnode') - + susan_smooth.connect(smooth, 'smoothed_file', outputnode, 'smoothed_files') - + return susan_smooth diff --git a/nipype/workflows/fsl/tests/test_dti.py b/nipype/workflows/fsl/tests/test_dti.py index 2e09ff7ced..cdf7d84dae 100644 --- a/nipype/workflows/fsl/tests/test_dti.py +++ b/nipype/workflows/fsl/tests/test_dti.py @@ -20,28 +20,28 @@ @with_setup(setup_test_dir, remove_test_dir) def test_create_eddy_correct_pipeline(): fsl_course_dir = os.environ["FSL_COURSE_DATA"] - + dwi_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/data.nii.gz") - + nipype_eddycorrect = fsl_wf.create_eddy_correct_pipeline("nipype_eddycorrect") nipype_eddycorrect.inputs.inputnode.in_file = dwi_file nipype_eddycorrect.inputs.inputnode.ref_num = 0 - + with warnings.catch_warnings(): warnings.simplefilter("ignore") original_eddycorrect = pe.Node(interface = fsl.EddyCorrect(), name="original_eddycorrect") original_eddycorrect.inputs.in_file = dwi_file original_eddycorrect.inputs.ref_num = 0 - + test = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test") - + pipeline = pe.Workflow(name="test_eddycorrect") pipeline.base_dir = test_dir - + pipeline.connect([(nipype_eddycorrect, test, [("outputnode.eddy_corrected", "inputnode.volume1")]), (original_eddycorrect, test, [("eddy_corrected", "inputnode.volume2")]), ]) - + pipeline.run(inseries=True) @skipif(no_fsl) @@ -49,12 +49,12 @@ def test_create_eddy_correct_pipeline(): @with_setup(setup_test_dir, remove_test_dir) def test_create_bedpostx_pipeline(): fsl_course_dir = os.environ["FSL_COURSE_DATA"] - + mask_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1.bedpostX/nodif_brain_mask.nii.gz") bvecs_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/bvecs") bvals_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/bvals") dwi_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/data.nii.gz") - + nipype_bedpostx = fsl_wf.create_bedpostx_pipeline("nipype_bedpostx") nipype_bedpostx.inputs.inputnode.dwi = dwi_file nipype_bedpostx.inputs.inputnode.mask = mask_file @@ -65,7 +65,7 @@ def test_create_bedpostx_pipeline(): nipype_bedpostx.inputs.xfibres.burn_in = 1000 nipype_bedpostx.inputs.xfibres.n_jumps = 1250 nipype_bedpostx.inputs.xfibres.sample_every = 25 - + with warnings.catch_warnings(): warnings.simplefilter("ignore") 
original_bedpostx = pe.Node(interface = fsl.BEDPOSTX(), name="original_bedpostx") @@ -79,30 +79,30 @@ def test_create_bedpostx_pipeline(): original_bedpostx.inputs.burn_period = 1000 original_bedpostx.inputs.jumps = 1250 original_bedpostx.inputs.sampling = 25 - + test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test") test_f2 = pe.Node(util.AssertEqual(), name="mean_f2_test") test_th1 = pe.Node(util.AssertEqual(), name="mean_th1_test") test_th2 = pe.Node(util.AssertEqual(), name="mean_th2_test") test_ph1 = pe.Node(util.AssertEqual(), name="mean_ph1_test") test_ph2 = pe.Node(util.AssertEqual(), name="mean_ph2_test") - + pipeline = pe.Workflow(name="test_bedpostx") pipeline.base_dir = test_dir - + def pickFirst(l): return l[0] - + def pickSecond(l): return l[1] - + pipeline.connect([(nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", pickFirst), "inputnode.volume1")]), (nipype_bedpostx, test_f2, [(("outputnode.mean_fsamples", pickSecond), "inputnode.volume1")]), (nipype_bedpostx, test_th1, [(("outputnode.mean_thsamples", pickFirst), "inputnode.volume1")]), (nipype_bedpostx, test_th2, [(("outputnode.mean_thsamples", pickSecond), "inputnode.volume1")]), (nipype_bedpostx, test_ph1, [(("outputnode.mean_phsamples", pickFirst), "inputnode.volume1")]), (nipype_bedpostx, test_ph2, [(("outputnode.mean_phsamples", pickSecond), "inputnode.volume1")]), - + (original_bedpostx, test_f1, [(("mean_fsamples", pickFirst), "inputnode.volume2")]), (original_bedpostx, test_f2, [(("mean_fsamples", pickSecond), "inputnode.volume2")]), (original_bedpostx, test_th1, [(("mean_thsamples", pickFirst), "inputnode.volume2")]), @@ -110,5 +110,5 @@ def pickSecond(l): (original_bedpostx, test_ph1, [(("mean_phsamples", pickFirst), "inputnode.volume2")]), (original_bedpostx, test_ph2, [(("mean_phsamples", pickSecond), "inputnode.volume2")]) ]) - + pipeline.run(inseries=True) \ No newline at end of file diff --git a/nipype/workflows/setup.py b/nipype/workflows/setup.py index 3d189ebfcb..9f3f90ba0c 100644 --- a/nipype/workflows/setup.py +++ b/nipype/workflows/setup.py @@ -9,7 +9,7 @@ def configuration(parent_package='',top_path=None): config.add_subpackage('freesurfer') config.add_subpackage('spm') config.add_subpackage('camino') - + return config if __name__ == '__main__': diff --git a/nipype/workflows/spm/preprocess.py b/nipype/workflows/spm/preprocess.py index f8b2b7599b..4730661501 100644 --- a/nipype/workflows/spm/preprocess.py +++ b/nipype/workflows/spm/preprocess.py @@ -164,9 +164,9 @@ def create_vbm_preproc(name='vbmpreproc'): 'fwhm', 'template_prefix']), name='inputspec') - + dartel_template = create_DARTEL_template() - + workflow.connect(inputnode, 'template_prefix', dartel_template, 'inputspec.template_prefix') workflow.connect(inputnode, 'structural_files', dartel_template, 'inputspec.structural_files') @@ -179,7 +179,7 @@ def getclass1images(class_images): for session in class_images: class1images.extend(session[0]) return class1images - + workflow.connect(dartel_template, ('segment.native_class_images', getclass1images), norm2mni, 'apply_to_files') workflow.connect(inputnode, 'fwhm', norm2mni, 'fwhm') @@ -202,7 +202,7 @@ def compute_icv(class_images): name='calc_icv') workflow.connect(dartel_template, 'segment.native_class_images', calc_icv, 'class_images') - + """ Define the outputs of the workflow and connect the nodes to the outputnode """ @@ -216,7 +216,7 @@ def compute_icv(class_images): (norm2mni, outputnode, [("normalized_files", "normalized_files")]), (calc_icv, outputnode, [("icv", "icv")]), ]) - 
+ return workflow def create_DARTEL_template(name='dartel_template'): @@ -242,12 +242,12 @@ def create_DARTEL_template(name='dartel_template'): outputspec.flow_fields : warps from input struct files to the template """ - + workflow = pe.Workflow(name=name) inputnode = pe.Node(niu.IdentityInterface(fields=['structural_files', 'template_prefix']), name='inputspec') - + segment = pe.MapNode(spm.NewSegment(), iterfield=['channel_files'], name='segment') @@ -260,9 +260,9 @@ def create_DARTEL_template(name='dartel_template'): tissue5 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 5), 4, (False,False), (False, False)) tissue6 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 6), 2, (False,False), (False, False)) segment.inputs.tissues = [tissue1, tissue2, tissue3, tissue4, tissue5, tissue6] - + dartel = pe.Node(spm.DARTEL(), name='dartel') - + """Get the gray and white segmentation classes generated by NewSegment """ @@ -276,7 +276,7 @@ def get2classes(dartel_files): workflow.connect(segment, ('dartel_input_images', get2classes), dartel, 'image_files') workflow.connect(inputnode, 'template_prefix', dartel, 'template_prefix') - + outputnode = pe.Node(niu.IdentityInterface(fields=["template_file", "flow_fields" ]), @@ -285,5 +285,5 @@ def get2classes(dartel_files): (dartel, outputnode, [('final_template_file','template_file'), ('dartel_flow_fields', 'flow_fields')]), ]) - + return workflow diff --git a/nipype/workflows/spm/tests/__init__.py b/nipype/workflows/spm/tests/__init__.py index 2232cd3166..cfdb162e68 100644 --- a/nipype/workflows/spm/tests/__init__.py +++ b/nipype/workflows/spm/tests/__init__.py @@ -1,2 +1 @@ __author__ = 'satra' - \ No newline at end of file diff --git a/setup.py b/setup.py index 300ec17c4b..8785ea9b2b 100755 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def configuration(parent_package='',top_path=None): delegate_options_to_subpackages=True, quiet=True) # The quiet=True option will silence all of the name setting warnings: - # Ignoring attempt to set 'name' (from 'nipy.core' to + # Ignoring attempt to set 'name' (from 'nipy.core' to # 'nipy.core.image') # Robert Kern recommends setting quiet=True on the numpy list, stating # these messages are probably only used in debugging numpy distutils. @@ -35,7 +35,7 @@ def configuration(parent_package='',top_path=None): ################################################################################ # For some commands, use setuptools -if len(set(('develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb', +if len(set(('develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb', 'bdist_wininst', 'install_egg_info', 'egg_info', 'easy_install', )).intersection(sys.argv)) > 0: from setup_egg import extra_setuptools_args @@ -48,7 +48,7 @@ def configuration(parent_package='',top_path=None): ################################################################################ -# Import the documentation building classes. +# Import the documentation building classes. 
try: from build_docs import cmdclass @@ -88,19 +88,19 @@ def configuration(parent_package='',top_path=None): def main(**extra_args): from numpy.distutils.core import setup - + install_requires=['numpy >=1.1', 'scipy >=0.7', 'matplotlib >=1.0.0', 'networkx >=1.0', 'nibabel >=1.0.0', 'traits >=4.0.0',] - + try: import json except ImportError: install_requires.append('simplejson') - + setup( name = 'nipype', description = 'Neuroimaging in Python: Pipelines and Interfaces', author = 'Various', diff --git a/setup_egg.py b/setup_egg.py index 3e6294769d..68ff5b15a1 100755 --- a/setup_egg.py +++ b/setup_egg.py @@ -3,7 +3,7 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Wrapper to run setup.py using setuptools.""" -from setuptools import setup +from setuptools import setup ################################################################################ # Call the setup.py script, injecting the setuptools-specific arguments. @@ -16,7 +16,7 @@ if __name__ == '__main__': - execfile('setup.py', dict(__name__='__main__', + execfile('setup.py', dict(__name__='__main__', extra_setuptools_args=extra_setuptools_args)) diff --git a/tools/apigen.py b/tools/apigen.py index c58ceacadc..ce52fad197 100644 --- a/tools/apigen.py +++ b/tools/apigen.py @@ -170,7 +170,7 @@ def _parse_module(self, uri): functions, classes = self._parse_lines(f) f.close() return functions, classes - + def _parse_lines(self, linesource): ''' Parse lines of text for functions and classes ''' functions = [] @@ -212,9 +212,9 @@ def generate_api_doc(self, uri): return '' # Make a shorter version of the uri that omits the package name for - # titles + # titles uri_short = re.sub(r'^%s\.' % self.package_name,'',uri) - + ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n' chap_title = uri_short @@ -293,7 +293,7 @@ def _survives_exclude(self, matchstr, match_type): elif match_type == 'package': patterns = self.package_skip_patterns else: - raise ValueError('Cannot interpret match type "%s"' + raise ValueError('Cannot interpret match type "%s"' % match_type) # Match to URI without package name L = len(self.package_name) @@ -309,7 +309,7 @@ def _survives_exclude(self, matchstr, match_type): return True def discover_modules(self): - ''' Return module sequence discovered from ``self.package_name`` + ''' Return module sequence discovered from ``self.package_name`` Parameters @@ -330,7 +330,7 @@ def discover_modules(self): >>> dw.package_skip_patterns.append('\.util$') >>> 'sphinx.util' in dw.discover_modules() False - >>> + >>> ''' modules = [self.package_name] # raw directory parsing @@ -353,7 +353,7 @@ def discover_modules(self): self._survives_exclude(module_uri, 'module')): modules.append(module_uri) return sorted(modules) - + def write_modules_api(self, modules,outdir): # write the list written_modules = [] @@ -378,7 +378,7 @@ def write_api_docs(self, outdir): outdir : string Directory name in which to store files We create automatic filenames for each module - + Returns ------- None @@ -392,7 +392,7 @@ def write_api_docs(self, outdir): # compose list of modules modules = self.discover_modules() self.write_modules_api(modules,outdir) - + def write_index(self, outdir, froot='gen', relative_to=None): """Make a reST API index file from written files diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py index 14b40ba16e..3a112b6eca 100755 --- a/tools/gitwash_dumper.py +++ b/tools/gitwash_dumper.py @@ -176,7 +176,7 @@ def main(): metavar="MAIN_GH_USER") parser.add_option("--gitwash-url", dest="gitwash_url", help="URL to gitwash repository - 
default %s" - % GITWASH_CENTRAL, + % GITWASH_CENTRAL, default=GITWASH_CENTRAL, metavar="GITWASH_URL") parser.add_option("--gitwash-branch", dest="gitwash_branch", diff --git a/tools/interfacedocgen.py b/tools/interfacedocgen.py index f0ac614e1d..b5b5048663 100644 --- a/tools/interfacedocgen.py +++ b/tools/interfacedocgen.py @@ -102,7 +102,7 @@ def __init__(self, class_skip_patterns : None or sequence Sequence of strings giving classes to be excluded Default is: None - + ''' if package_skip_patterns is None: package_skip_patterns = ['\\.tests$'] @@ -215,7 +215,7 @@ def _parse_module(self, uri): functions, classes = self._parse_lines(f, uri) f.close() return functions, classes - + def _parse_lines(self, linesource, module): ''' Parse lines of text for functions and classes ''' functions = [] @@ -246,7 +246,7 @@ def _write_graph_section(self, fname, title): fhandle = open(fname) for line in fhandle: ad += '\t' + line + '\n' - + fhandle.close() os.remove(fname) os.remove(fname + ".png") @@ -275,19 +275,19 @@ def generate_api_doc(self, uri): workflow = finst() except TypeError: continue - + if isinstance(workflow, Workflow): workflows.append((workflow,function, finst)) - + if not classes and not workflows: print 'WARNING: Empty -',uri # dbg return '' # Make a shorter version of the uri that omits the package name for - # titles + # titles #uri_short = re.sub(r'^%s\.' % self.package_name,'',uri) uri_short = uri - + ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n' chap_title = uri_short @@ -348,7 +348,7 @@ def generate_api_doc(self, uri): setattr(classinst.inputs,i, None) except TraitError, excp: fieldstr += " : (%s)\n\t"%excp.info - + try: fieldstr += '\t' + getattr(v, 'desc') except: @@ -365,7 +365,7 @@ def generate_api_doc(self, uri): if not opthelpstr: opthelpstr = ['[Optional]'] opthelpstr += [fieldstr] - + if mandhelpstr: helpstr += '\n\t'.join(mandhelpstr) helpstr += '\n\n\t' @@ -373,7 +373,7 @@ def generate_api_doc(self, uri): helpstr += '\n\t'.join(opthelpstr) if helpstr: helpstr += '\n\n' - + if classinst._outputs(): iterator = classinst._outputs().items else: @@ -381,12 +381,12 @@ def generate_api_doc(self, uri): outstr = [] for i,v in sorted(iterator()): fieldstr = i - + try: setattr(classinst._outputs(),i, None) except TraitError, excp: fieldstr += " : (%s)\n\t"%excp.info - + try: fieldstr += '\t' + getattr(v, 'desc') except: @@ -410,20 +410,20 @@ def generate_api_doc(self, uri): '\n' \ ' .. 
automethod:: __init__\n' """ - + for workflow, name, finst in workflows: ad += '\n:class:`' + name + '()`\n' \ + self.rst_section_levels[2] * \ (len(name)+11) + '\n\n' helpstr = trim(finst.__doc__, self.rst_section_levels[3]) + "\n\n" ad += '\n' + helpstr + '\n' - - + + (_,fname) = tempfile.mkstemp(suffix=".dot") workflow.write_graph(dotfilename=fname, graph2use='hierarchical') - + ad += self._write_graph_section(fname, 'Graph') - + return ad def _survives_exclude(self, matchstr, match_type): @@ -454,7 +454,7 @@ def _survives_exclude(self, matchstr, match_type): elif match_type == 'class': patterns = self.class_skip_patterns else: - raise ValueError('Cannot interpret match type "%s"' + raise ValueError('Cannot interpret match type "%s"' % match_type) # Match to URI without package name L = len(self.package_name) @@ -470,7 +470,7 @@ def _survives_exclude(self, matchstr, match_type): return True def discover_modules(self): - ''' Return module sequence discovered from ``self.package_name`` + ''' Return module sequence discovered from ``self.package_name`` Parameters @@ -491,7 +491,7 @@ def discover_modules(self): >>> dw.package_skip_patterns.append('\.util$') >>> 'sphinx.util' in dw.discover_modules() False - >>> + >>> ''' modules = [self.package_name] # raw directory parsing @@ -514,7 +514,7 @@ def discover_modules(self): self._survives_exclude(module_uri, 'module')): modules.append(module_uri) return sorted(modules) - + def write_modules_api(self, modules,outdir): # write the list written_modules = [] @@ -539,7 +539,7 @@ def write_api_docs(self, outdir): outdir : string Directory name in which to store files We create automatic filenames for each module - + Returns ------- None @@ -553,7 +553,7 @@ def write_api_docs(self, outdir): # compose list of modules modules = self.discover_modules() self.write_modules_api(modules,outdir) - + def write_index(self, outdir, froot='gen', relative_to=None): """Make a reST API index file from written files diff --git a/tools/nipype_nightly.py b/tools/nipype_nightly.py index 275920e915..5644cedf58 100644 --- a/tools/nipype_nightly.py +++ b/tools/nipype_nightly.py @@ -13,7 +13,7 @@ def run_cmd(cmd): print cmd proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stderr=subprocess.PIPE, env=os.environ, shell=True) output, error = proc.communicate() @@ -61,19 +61,19 @@ def setup_paths(): # this. 
nx_path = os.path.join(pkg_path, 'networkx-0.99-py2.6.egg') sys.path.insert(2, nx_path) - twisted_path = os.path.join(pkg_path_64, + twisted_path = os.path.join(pkg_path_64, 'Twisted-8.2.0-py2.6-linux-x86_64.egg') sys.path.insert(2, twisted_path) zope_path = os.path.join(pkg_path_64, 'zope.interface-3.5.2-py2.6-linux-x86_64.egg') sys.path.insert(2, zope_path) - foolscap_path = os.path.join(pkg_path, + foolscap_path = os.path.join(pkg_path, 'foolscap-0.2.9-py2.6.egg') sys.path.insert(2, foolscap_path) # Define our PYTHONPATH variable os.environ['PYTHONPATH'] = ':'.join(sys.path) - + if __name__ == '__main__': setup_paths() prev_dir = os.path.abspath(os.curdir) diff --git a/tools/report_coverage.py b/tools/report_coverage.py index 110a492e3a..95385243c7 100644 --- a/tools/report_coverage.py +++ b/tools/report_coverage.py @@ -3,7 +3,7 @@ import subprocess def run_tests(cmd): - proc = subprocess.Popen(cmd, + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) diff --git a/tools/run_examples.py b/tools/run_examples.py index 4721bf032a..5eb024d2aa 100644 --- a/tools/run_examples.py +++ b/tools/run_examples.py @@ -27,4 +27,4 @@ def run_examples(example, pipelines, plugin): for plugin in plugins: for example, pipelines in examples.items(): run_examples(example, pipelines, plugin) - + diff --git a/tools/run_interface.py b/tools/run_interface.py index 597be66ef2..ea9fceb825 100644 --- a/tools/run_interface.py +++ b/tools/run_interface.py @@ -16,7 +16,7 @@ def listClasses(module=None): for k,v in pkg.__dict__.items(): if 'class' in str(v) and k != '__builtins__': print "\t%s"%k - + def add_options(parser=None, module=None, function=None): interface = None if parser and module and function: @@ -61,7 +61,7 @@ def get_modfunc(args): if len(posargs)==2: function = posargs[1] return module, function - + def parse_args(): usage = "usage: %prog [options] module function" parser = OptionParser(usage=usage,version="%prog 1.0", @@ -69,7 +69,7 @@ def parse_args(): parser.add_option("--run", dest="run", action='store_true',help="Execute", default=False) - + module, function = get_modfunc(sys.argv[1:]) parser, interface = add_options(parser, module, function) (options, args) = parser.parse_args() @@ -81,7 +81,7 @@ def parse_args(): if module and not function: listClasses(module) parser.exit() - + #***************************************************************************** if __name__ == '__main__':