sty: removed trailing spaces (thanks alex)
satra committed Sep 9, 2011
1 parent 7df211b commit 1c11e63
Showing 109 changed files with 1,252 additions and 1,254 deletions.
5 changes: 3 additions & 2 deletions Makefile
@@ -53,7 +53,8 @@ inplace:
$(PYTHON) setup.py build_ext -i

test-code: in
$(NOSETESTS) -s nipype
$(NOSETESTS) -s nipype --with-doctest

test-doc:
$(NOSETESTS) -s --with-doctest --doctest-tests --doctest-extension=rst \
--doctest-fixtures=_fixture doc/
@@ -62,5 +63,5 @@ test-coverage:
$(NOSETESTS) -s --with-coverage --cover-html --cover-html-dir=coverage \
--cover-package=nipype nipype

test: test-code test-doc
test: test-code
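The change above folds the doctests into the main test-code run, so the test target no longer chains a separate test-doc pass. A minimal sketch of invoking the same run from Python instead of make (nose must be installed and nipype importable; the argv list simply mirrors the Makefile recipe above):

import nose

# Equivalent of the updated `make test-code`: run the nipype test suite
# with doctests collected in the same pass.
nose.run(argv=['nosetests', '-s', 'nipype', '--with-doctest'])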

30 changes: 15 additions & 15 deletions build_docs.py
@@ -25,7 +25,7 @@
DOC_DOCTREES_DIR = os.path.join('doc', '_build', 'doctrees')

################################################################################
# Distutils Command class for installing nipype to a temporary location.
# Distutils Command class for installing nipype to a temporary location.
class TempInstall(Command):
temp_install_dir = os.path.join('build', 'install')

@@ -34,11 +34,11 @@ def run(self):
install = self.distribution.get_command_obj('install')
install.install_scripts = self.temp_install_dir
install.install_base = self.temp_install_dir
install.install_platlib = self.temp_install_dir
install.install_purelib = self.temp_install_dir
install.install_data = self.temp_install_dir
install.install_lib = self.temp_install_dir
install.install_headers = self.temp_install_dir
install.install_platlib = self.temp_install_dir
install.install_purelib = self.temp_install_dir
install.install_data = self.temp_install_dir
install.install_lib = self.temp_install_dir
install.install_headers = self.temp_install_dir
install.run()

# Horrible trick to reload nipype with our temporary instal
@@ -52,13 +52,13 @@ def run(self):

def initialize_options(self):
pass

def finalize_options(self):
pass


################################################################################
# Distutils Command class for API generation
# Distutils Command class for API generation
class APIDocs(TempInstall):
description = \
"""generate API docs """
@@ -99,7 +99,7 @@ def relative_path(filename):


################################################################################
# Distutils Command class build the docs
# Distutils Command class build the docs
class MyBuildDoc(BuildDoc):
""" Sub-class the standard sphinx documentation building system, to
add logics for API generation and matplotlib's plot directive.
@@ -121,7 +121,7 @@ def run(self):
# in case I'm missing something?
BuildDoc.run(self)
self.zip_docs()

def zip_docs(self):
if not os.path.exists(DOC_BUILD_DIR):
raise OSError, 'Doc directory does not exist.'
@@ -131,19 +131,19 @@ def zip_docs(self):
# for it. ZIP_STORED produces an uncompressed zip, but does not
# require zlib.
try:
zf = zipfile.ZipFile(target_file, 'w',
zf = zipfile.ZipFile(target_file, 'w',
compression=zipfile.ZIP_DEFLATED)
except RuntimeError:
warnings.warn('zlib not installed, storing the docs '
'without compression')
zf = zipfile.ZipFile(target_file, 'w',
compression=zipfile.ZIP_STORED)
zf = zipfile.ZipFile(target_file, 'w',
compression=zipfile.ZIP_STORED)

for root, dirs, files in os.walk(DOC_BUILD_DIR):
relative = relative_path(root)
if not relative.startswith('.doctrees'):
for f in files:
zf.write(os.path.join(root, f),
zf.write(os.path.join(root, f),
os.path.join(relative, 'html_docs', f))
zf.close()
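The zip_docs hunk above keeps a fallback for machines without zlib: try a compressed archive first, then fall back to an uncompressed one. A minimal, self-contained sketch of that pattern (the function name, directory arguments and archive layout here are illustrative, not the project's code verbatim):

import os
import warnings
import zipfile

def zip_docs_dir(build_dir, target_file):
    # Prefer a compressed archive; fall back to ZIP_STORED when zlib is missing.
    try:
        zf = zipfile.ZipFile(target_file, 'w', compression=zipfile.ZIP_DEFLATED)
    except RuntimeError:
        warnings.warn('zlib not installed, storing the docs without compression')
        zf = zipfile.ZipFile(target_file, 'w', compression=zipfile.ZIP_STORED)
    for root, dirs, files in os.walk(build_dir):
        relative = os.path.relpath(root, build_dir)
        if relative.startswith('.doctrees'):
            continue  # skip Sphinx's pickled doctrees, as the original does
        for name in files:
            # Mirror the archive layout used above: <relative>/html_docs/<file>
            zf.write(os.path.join(root, name),
                     os.path.join(relative, 'html_docs', name))
    zf.close()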

@@ -171,7 +171,7 @@ def run(self):
print "Removing %s" % interface_path
shutil.rmtree(interface_path)
if os.path.exists(DOC_BUILD_DIR):
print "Removing %s" % DOC_BUILD_DIR
print "Removing %s" % DOC_BUILD_DIR
shutil.rmtree(DOC_BUILD_DIR)
if os.path.exists(DOC_DOCTREES_DIR):
print "Removing %s" % DOC_DOCTREES_DIR
10 changes: 5 additions & 5 deletions doc/sphinxext/docscrape.py
@@ -185,7 +185,7 @@ def _parse_param_list(self,content):

return params


_name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
def _parse_see_also(self, content):
@@ -218,7 +218,7 @@ def push_item(name, rest):

current_func = None
rest = []

for line in content:
if not line.strip(): continue

@@ -260,7 +260,7 @@ def strip_each_in(lst):
if len(line) > 2:
out[line[1]] = strip_each_in(line[2].split(','))
return out

def _parse_summary(self):
"""Grab signature (if given) and summary"""
if self._is_at_section():
@@ -277,7 +277,7 @@ def _parse_summary(self):

if not self._is_at_section():
self['Extended Summary'] = self._read_to_next_section()

def _parse(self):
self._doc.reset()
self._parse_summary()
@@ -442,7 +442,7 @@ def get_func(self):
else:
func = self._f
return func, func_name

def __str__(self):
out = ''

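The _name_rgx pattern in the first docscrape.py hunk recognises either a ':role:`name`' reference or a bare name at the start of a See Also entry. A quick standalone check of that behaviour (the regex is copied from the hunk; the sample strings are invented):

import re

name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
                      r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)

for entry in (":func:`numpy.mean`", " numpy.median : the middle value"):
    m = name_rgx.match(entry)
    # Prints the role (or None) and whichever name group matched.
    print(m.group('role'), m.group('name') or m.group('name2'))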
2 changes: 1 addition & 1 deletion doc/sphinxext/ipython_console_highlighting.py
@@ -43,7 +43,7 @@ class IPythonConsoleLexer(Lexer):
- It assumes the default IPython prompts, not customized ones.
"""

name = 'IPython console session'
aliases = ['ipython']
mimetypes = ['text/x-ipython-console']
2 changes: 1 addition & 1 deletion doc/sphinxext/numpydoc.py
@@ -90,7 +90,7 @@ def initialize(app):
def setup(app, get_doc_object_=get_doc_object):
global get_doc_object
get_doc_object = get_doc_object_

app.connect('autodoc-process-docstring', mangle_docstrings)
app.connect('builder-inited', initialize)
app.add_config_value('numpydoc_edit_link', None, True)
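For context, the setup() function shown in this hunk is the standard Sphinx extension entry point: numpydoc attaches its docstring mangler to autodoc and registers a config value. A bare-bones sketch with the same shape (the callback body and config-value name are placeholders, not numpydoc's real internals):

def process_docstring(app, what, name, obj, options, lines):
    # Sphinx passes the docstring as a mutable list of lines; edit it in place.
    lines[:] = [line.rstrip() for line in lines]

def setup(app):
    app.connect('autodoc-process-docstring', process_docstring)
    app.add_config_value('my_extension_option', None, True)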
2 changes: 1 addition & 1 deletion examples/camino_dti_tutorial.py
@@ -136,7 +136,7 @@ def get_affine(volume):

"""
In this tutorial we implement probabilistic tractography using the PICo algorithm.
PICo tractography requires an estimate of the fibre direction and a model of its
PICo tractography requires an estimate of the fibre direction and a model of its
uncertainty in each voxel; this is produced using the following node.
"""

2 changes: 1 addition & 1 deletion examples/connectivity_tutorial.py
@@ -29,7 +29,7 @@
These are written by Stephan Gerhard and can be obtained from:
http://www.cmtk.org/
Or on github at:
CFFlib: https://github.com/LTS5/cfflib
52 changes: 26 additions & 26 deletions examples/dartmouth_workshop_2010.py
@@ -1,32 +1,32 @@
"""
=================================
Dartmouth College Workshop 2010
Dartmouth College Workshop 2010
=================================
First lets go to the directory with the data we'll be working on and start the interactive python interpreter
(with some nipype specific configuration). Note that nipype does not need to be run through ipython - it is
First lets go to the directory with the data we'll be working on and start the interactive python interpreter
(with some nipype specific configuration). Note that nipype does not need to be run through ipython - it is
just much nicer to do interactive work in it.
.. sourcecode:: bash
cd $TDPATH
ipython -p nipype
For every neuroimaging procedure supported by nipype there exists a wrapper - a small piece of code managing
For every neuroimaging procedure supported by nipype there exists a wrapper - a small piece of code managing
the underlying software (FSL, SPM, AFNI etc.). We call those interfaces. They are standarised so we can hook them up
together. Lets have a look at some of them.
.. sourcecode:: ipython
In [1]: import nipype.interfaces.fsl as fsl
In [2]: fsl.BET.help()
Inputs
------
Mandatory:
in_file: input file to skull strip
Optional:
args: Additional parameters to the command
center: center of gravity in voxels
@@ -46,7 +46,7 @@
skull: create skull image
threshold: apply thresholding to segmented brain image and mask
vertical_gradient: vertical gradient in fractional intensity threshold (-1, 1)
Outputs
-------
mask_file: path/name of binary brain mask (if generated)
@@ -59,16 +59,16 @@
In [4]: fs.Smooth.help()
Inputs
------
Mandatory:
in_file: source volume
num_iters: number of iterations instead of fwhm
mutually exclusive: surface_fwhm
reg_file: registers volume to surface anatomical
reg_file: registers volume to surface anatomical
surface_fwhm: surface FWHM in mm
mutually exclusive: num_iters
requires: reg_file
Optional:
args: Additional parameters to the command
environ: Environment variables (default={})
@@ -79,15 +79,15 @@
smoothed_file: output volume
subjects_dir: subjects directory
vol_fwhm: volumesmoothing outside of surface
Outputs
-------
args: Additional parameters to the command
environ: Environment variables
smoothed_file: smoothed input volume
subjects_dir: subjects directory
You can read about all of the interfaces implemented in nipype at our online documentation at http://nipy.sourceforge.net/nipype/documentation.html#documentation .
You can read about all of the interfaces implemented in nipype at our online documentation at http://nipy.sourceforge.net/nipype/documentation.html#documentation .
Check it out now.
Using interfaces
@@ -101,7 +101,7 @@
print result

"""
Running a single program is not much of a breakthrough. Lets run motion correction followed by smoothing
Running a single program is not much of a breakthrough. Lets run motion correction followed by smoothing
(isotropic - in other words not using SUSAN). Notice that in the first line we are setting the output data type
for all FSL interfaces.
"""
@@ -132,15 +132,15 @@
motion_correct_and_smooth.base_dir = os.path.abspath('.') # define where will be the root folder for the workflow
motion_correct_and_smooth.connect([
(motion_correct, smooth, [('out_file', 'in_file')])
])
])
# we are connecting 'out_file' output of motion_correct to 'in_file' input of smooth
motion_correct_and_smooth.run()

"""
Another workflow
----------------
Another example of a simple workflow (calculate the mean of fMRI signal and subtract it).
Another example of a simple workflow (calculate the mean of fMRI signal and subtract it).
This time we'll be assigning inputs after defining the workflow.
"""

@@ -205,7 +205,7 @@
Datasink is a special interface for copying and arranging results.
"""

import nipype.interfaces.io as nio

preproc.inputs.inputspec.func = os.path.abspath('data/s1/f3.nii')
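The docstring above introduces DataSink, but the node itself is declared outside the hunk. A hedged sketch of such a node, reusing nio and pe as imported earlier in the tutorial (the base directory and container names are invented):

datasink = pe.Node(interface=nio.DataSink(), name='datasink')
datasink.inputs.base_directory = os.path.abspath('workflow_output')
datasink.inputs.container = 'subject_1'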
@@ -218,15 +218,15 @@
('maskfunc3.out_file', 'funcruns')])
])
preprocess.run()

"""
Datagrabber
-----------
Datagrabber is (surprise, surprise) an interface for collecting files from hard drive. It is very flexible and
supports almost any file organisation of your data you can imagine.
supports almost any file organisation of your data you can imagine.
"""

datasource1 = nio.DataGrabber()
datasource1.inputs.template = 'data/s1/f3.nii'
results = datasource1.run()
@@ -248,13 +248,13 @@
datasource4.inputs.run = [3, 7]
datasource4.inputs.subject_id = ['s1', 's3']
results = datasource4.run()
print results.outputs
print results.outputs
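datasource4 above is driven through its run and subject_id infields, but its construction sits outside the hunk. A hedged sketch of how such a templated DataGrabber is commonly declared (the template path is invented; newer nipype versions may require further inputs such as sort_filelist):

datasource4 = nio.DataGrabber(infields=['subject_id', 'run'])
datasource4.inputs.base_directory = os.path.abspath('data')
datasource4.inputs.template = '%s/f%d.nii'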

"""
Iterables
---------
Iterables is a special field of the Node class that enables to iterate all workflows/nodes connected to it over
Iterables is a special field of the Node class that enables to iterate all workflows/nodes connected to it over
some parameters. Here we'll use it to iterate over two subjects.
"""

4 changes: 2 additions & 2 deletions examples/dtk_dti_tutorial.py
@@ -22,7 +22,7 @@
import nipype.interfaces.io as nio # Data i/o
import nipype.interfaces.fsl as fsl # fsl
import nipype.workflows.fsl as fsl_wf # fsl
import nipype.interfaces.diffusion_toolkit as dtk
import nipype.interfaces.diffusion_toolkit as dtk
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import os # system functions
@@ -102,7 +102,7 @@

datasource.inputs.template = "%s/%s"

# This needs to point to the fdt folder you can find after extracting
# This needs to point to the fdt folder you can find after extracting
# http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz
datasource.inputs.base_directory = os.path.abspath('fsl_course_data/fdt/')

4 changes: 2 additions & 2 deletions examples/dtk_odf_tutorial.py
@@ -22,7 +22,7 @@
import nipype.interfaces.io as nio # Data i/o
import nipype.interfaces.fsl as fsl # fsl
import nipype.workflows.fsl as fsl_wf # fsl
import nipype.interfaces.diffusion_toolkit as dtk
import nipype.interfaces.diffusion_toolkit as dtk
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import os # system functions
@@ -102,7 +102,7 @@

datasource.inputs.template = "%s/%s"

# This needs to point to the fdt folder you can find after extracting
# This needs to point to the fdt folder you can find after extracting
# http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz
datasource.inputs.base_directory = os.path.abspath('data')
