From 8cadf62411e461da0fb4e87b84aa898eee391b3b Mon Sep 17 00:00:00 2001
From: Paul McCarthy <pauldmccarthy@gmail.com>
Date: Tue, 25 Apr 2017 14:59:14 +0100
Subject: [PATCH] Removing spurious whitespace (just found out that emacs can
 do it for me).

---
 fsl/__init__.py             |   2 +-
 fsl/data/atlases.py         | 112 ++++++++++----------
 fsl/data/dtifit.py          |  47 +++++----
 fsl/data/featanalysis.py    |  46 ++++-----
 fsl/data/featdesign.py      |  90 ++++++++--------
 fsl/data/featimage.py       |  52 +++++-----
 fsl/data/fixlabels.py       |  34 +++----
 fsl/data/gifti.py           |  22 ++--
 fsl/data/image.py           | 198 ++++++++++++++++++------------------
 fsl/data/imagewrapper.py    | 140 ++++++++++++-------------
 fsl/data/melodicanalysis.py |  22 ++--
 fsl/data/melodicimage.py    |  22 ++--
 fsl/data/mesh.py            |  28 ++---
 fsl/data/vest.py            |   2 +-
 fsl/data/volumelabels.py    |  32 +++---
 fsl/scripts/imcp.py         |   6 +-
 fsl/scripts/immv.py         |   2 +-
 fsl/utils/async.py          |  58 +++++------
 fsl/utils/cache.py          |  20 ++--
 fsl/utils/imcp.py           |  32 +++---
 fsl/utils/memoize.py        |  20 ++--
 fsl/utils/notifier.py       |  30 +++---
 fsl/utils/path.py           |  70 ++++++-------
 fsl/utils/platform.py       |  32 +++---
 fsl/utils/settings.py       |  24 ++---
 fsl/utils/transform.py      |  58 +++++------
 fsl/utils/weakfuncref.py    |  18 ++--
 27 files changed, 613 insertions(+), 606 deletions(-)

diff --git a/fsl/__init__.py b/fsl/__init__.py
index 696e4ba25..ea77063fc 100644
--- a/fsl/__init__.py
+++ b/fsl/__init__.py
@@ -5,7 +5,7 @@
 # Author: Paul McCarthy <pauldmccarthy@gmail.com>
 #
 """The :mod:`fsl` package is a library which contains convenience classes
-and functions for use by FSL python tools. It is broadly split into the 
+and functions for use by FSL python tools. It is broadly split into the
 following sub-packages:
 
 .. autosummary::
diff --git a/fsl/data/atlases.py b/fsl/data/atlases.py
index 6fd9d4bea..db1925fa4 100644
--- a/fsl/data/atlases.py
+++ b/fsl/data/atlases.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# atlases.py - API which provides access to the atlas image files contained 
+# atlases.py - API which provides access to the atlas image files contained
 #              in $FSLDIR/data/atlases/
 #
 # Author: Paul McCarthy <pauldmccarthy@gmail.com>
@@ -68,7 +68,7 @@ class AtlasRegistry(notifier.Notifier):
     ``$FSLDIR/data/atlases``, and builds a list of :class:`AtlasDescription`
     instances, each of which contains information about one atlas.
 
-    
+
     The :meth:`addAtlas` method allows other atlases to be added to the
     registry. Whenever a new atlas is added, the ``AtlasRegistry`` notifies
     any registered listeners via the :class:`.Notifier` interface with the
@@ -81,18 +81,18 @@ class AtlasRegistry(notifier.Notifier):
 
     The ``AtlasRegistry`` stores a list of all known atlases via the
     :mod:`.settings` module. When an ``AtlasRegistry`` is created, it loads
-    in any previously known atlases. Whenever a new atlas is added, this 
+    in any previously known atlases. Whenever a new atlas is added, this
     list is updated. See the :meth:`__getKnownAtlases` and
     :meth:`_saveKnownAtlases` methods.
     """
 
-    
+
     def __init__(self):
         """Create an ``AtlasRegistry``. """
 
         # A list of all AtlasDescription
-        # instances in existence, sorted 
-        # by AtlasDescription.name. 
+        # instances in existence, sorted
+        # by AtlasDescription.name.
         self.__atlasDescs = []
 
 
@@ -105,9 +105,9 @@ class AtlasRegistry(notifier.Notifier):
         log.debug('Initialising atlas registry')
         self.__atlasDescs = []
 
-        # Get $FSLDIR atlases 
+        # Get $FSLDIR atlases
         fslPaths = []
-        if platform.fsldir is not None: 
+        if platform.fsldir is not None:
             fsldir   = op.join(platform.fsldir, 'data', 'atlases')
             fslPaths = sorted(glob.glob(op.join(fsldir, '*.xml')))
 
@@ -115,14 +115,14 @@ class AtlasRegistry(notifier.Notifier):
         # been loaded in the past
         extraIDs, extraPaths = self.__getKnownAtlases()
 
-        # FSLDIR atlases first, any 
+        # FSLDIR atlases first, any
         # other atlases second.
         atlasPaths = list(fslPaths)         + extraPaths
         atlasIDs   = [None] * len(fslPaths) + extraIDs
 
         with self.skipAll():
             for atlasID, atlasPath in zip(atlasIDs, atlasPaths):
-                
+
                 # The FSLDIR atlases are probably
                 # listed twice - from the above glob,
                 # and from the saved extraPaths. So
@@ -137,7 +137,7 @@ class AtlasRegistry(notifier.Notifier):
                                 'specification {}'.format(atlasPath),
                                 exc_info=True)
 
-    
+
     def listAtlases(self):
         """Returns a list containing :class:`AtlasDescription` objects for
         all available atlases. The atlases are ordered in terms of the
@@ -157,17 +157,17 @@ class AtlasRegistry(notifier.Notifier):
         """Returns an :class:`AtlasDescription` instance describing the
         atlas with the given ``atlasID``.
         """
-        
+
         for desc in self.__atlasDescs:
             if desc.atlasID == atlasID:
                 return desc
-            
+
         raise KeyError('Unknown atlas ID: {}'.format(atlasID))
 
 
     def loadAtlas(self, atlasID, loadSummary=False, resolution=None):
         """Loads and returns an :class:`Atlas` instance for the atlas
-        with the given  ``atlasID``. 
+        with the given  ``atlasID``.
 
         :arg loadSummary: If ``True``, a 3D :class:`LabelAtlas` image is
                           loaded. Otherwise, if the atlas is probabilistic,
@@ -203,8 +203,8 @@ class AtlasRegistry(notifier.Notifier):
                        atlas with the given ID already exists, this new atlas
                        is given a unique id.
 
-        :arg save:     If ``True`` (the default), this atlas will be saved 
-                       so that it will be available in future instantiations. 
+        :arg save:     If ``True`` (the default), this atlas will be saved
+                       so that it will be available in future instantiations.
         """
 
         filename = op.abspath(filename)
@@ -233,7 +233,7 @@ class AtlasRegistry(notifier.Notifier):
             self.__saveKnownAtlases()
 
         self.notify(topic='add', value=desc)
-            
+
         return desc
 
 
@@ -248,15 +248,15 @@ class AtlasRegistry(notifier.Notifier):
                 log.debug('Removing atlas from registry: {} / {}'.format(
                     desc.atlasID,
                     desc.specPath))
-                
+
                 self.__atlasDescs.pop(i)
                 break
-        
+
         self.__saveKnownAtlases()
 
         self.notify(topic='remove', value=desc)
 
-    
+
     def __getKnownAtlases(self):
         """Returns a list of tuples containing the IDs and paths of all known
         atlases .
@@ -282,15 +282,15 @@ class AtlasRegistry(notifier.Notifier):
             paths = [e[1] for e in atlases]
 
             return names, paths
-            
+
         except:
             return [], []
 
-    
+
     def __saveKnownAtlases(self):
         """Saves the IDs and paths of all atlases which are currently in
         the registry. The atlases are saved via the :mod:`.settings` module.
-        """ 
+        """
 
         if self.__atlasDescs is None:
             return
@@ -302,10 +302,10 @@ class AtlasRegistry(notifier.Notifier):
 
         atlases = ['{}={}'.format(name, path) for name, path in atlases]
         atlases = op.pathsep.join(atlases)
-        
+
         fslsettings.write('fsl.data.atlases', atlases)
 
-    
+
 class AtlasDescription(object):
     """An ``AtlasDescription`` instance parses and stores the information
     stored in the FSL XML file that describes a single FSL atlas.  An XML
@@ -325,11 +325,11 @@ class AtlasDescription(object):
                                 # label, Otherwise, if type is
                                 # Label, path to 3D label file
                                 # (identical to the summaryimagefile
-                                # below). The path must be specified 
+                                # below). The path must be specified
                                 # as relative to the location of this
                                 # XML file.
 
-            <summaryimagefile>  # Path to 3D summary file, with each 
+            <summaryimagefile>  # Path to 3D summary file, with each
             </summaryimagefile> # region having value (index + 1)
 
            </images>
@@ -342,7 +342,7 @@ class AtlasDescription(object):
          # index - For probabilistic atlases, index of corresponding volume in
          #         4D image file. For label images, the value of voxels which
          #         are in the corresponding region.
-         # 
+         #
          # x    |
          # y    |- XYZ *voxel* coordinates into the first image of the <images>
          #      |  list
@@ -352,7 +352,7 @@ class AtlasDescription(object):
         </data>
        </atlas>
 
-    
+
     Each ``AtlasDescription`` is assigned an identifier, which is simply the
     XML file name describing the atlas, sans-suffix, and converted to lower
     case.  For exmaple, the atlas described by:
@@ -363,7 +363,7 @@ class AtlasDescription(object):
 
         ``harvardoxford-cortical``
 
-    
+
     This identifier is intended to be unique.
 
 
@@ -371,26 +371,26 @@ class AtlasDescription(object):
 
     ================= ======================================================
     ``atlasID``       The atlas ID, as described above.
-    
+
     ``name``          Name of the atlas.
 
     ``specPath``      Path to the atlas XML specification file.
-    
+
     ``atlasType``     Atlas type - either *probabilistic* or *label*.
-    
+
     ``images``        A list of images available for this atlas - usually
                       :math:`1mm^3` and :math:`2mm^3` images are present.
-    
+
     ``summaryImages`` For probabilistic atlases, a list of *summary* images,
                       which are just 3D labelled variants of the atlas.
-    
+
     ``pixdims``       A list of ``(x, y, z)`` pixdim tuples in mm, one for
                       each image in ``images``.
 
     ``xforms``        A list of affine transformation matrices (as ``4*4``
                       ``numpy`` arrays), one for each image in ``images``,
                       defining the voxel to world coordinate transformations.
-    
+
     ``labels``        A list of ``AtlasLabel`` objects, describing each
                       region / label in the atlas.
     ================= ======================================================
@@ -417,7 +417,7 @@ class AtlasDescription(object):
               MNI152 space).
     """
 
-    
+
     def __init__(self, filename, atlasID=None):
         """Create an ``AtlasDescription`` instance.
 
@@ -440,7 +440,7 @@ class AtlasDescription(object):
         self.specPath  = op.abspath(filename)
         self.name      = header.find('name').text
         self.atlasType = header.find('type').text.lower()
- 
+
         # Spelling error in some of the atlas.xml files.
         if self.atlasType == 'probabalistic':
             self.atlasType = 'probabilistic'
@@ -452,7 +452,7 @@ class AtlasDescription(object):
         self.xforms        = []
 
         atlasDir = op.dirname(self.specPath)
-        
+
         for image in images:
             imagefile        = image.find('imagefile')       .text
             summaryimagefile = image.find('summaryimagefile').text
@@ -485,7 +485,7 @@ class AtlasDescription(object):
         coords = np.zeros((len(labels), 3), dtype=np.float32)
 
         for i, label in enumerate(labels):
-            
+
             al        = AtlasLabel()
             al.name   = label.text
             al.index  = int(  label.attrib['index'])
@@ -502,7 +502,7 @@ class AtlasDescription(object):
         # into world coordinates
         coords = transform.transform(coords, self.xforms[0])
 
-        # Update the coordinates 
+        # Update the coordinates
         # in our label objects
         for i, label in enumerate(self.labels):
 
@@ -514,12 +514,12 @@ class AtlasDescription(object):
         """
         return self.atlasID == other.atlasID
 
-    
+
     def __neq__(self, other):
         """Compares the ``atlasID`` of this ``AtlasDescription`` with another.
         """
-        return self.atlasID != other.atlasID 
-    
+        return self.atlasID != other.atlasID
+
 
     def __cmp__(self, other):
         """Compares this ``AtlasDescription`` with another by their ``name``
@@ -534,11 +534,11 @@ class Atlas(fslimage.Image):
     logic common to both.
     """
 
-    
+
     def __init__(self, atlasDesc, resolution=None, isLabel=False):
         """Initialise an ``Atlas``.
 
-        :arg atlasDesc:  The :class:`AtlasDescription` instance which 
+        :arg atlasDesc:  The :class:`AtlasDescription` instance which
                          describes the atlas.
 
         :arg resolution: Desired isotropic resolution in millimetres.
@@ -552,13 +552,13 @@ class Atlas(fslimage.Image):
         # If a reslution has not been provided,
         # choose the atlas image with the
         # highest resolution.
-        # 
+        #
         # We divide by three to get the atlas
         # image index because there are three
         # pixdim values for each atlas.
         res   = resolution
         reses = np.concatenate(atlasDesc.pixdims)
- 
+
         if resolution is None: imageIdx = np.argmin(reses)               / 3
         else:                  imageIdx = np.argmin(np.abs(reses - res)) / 3
 
@@ -575,7 +575,7 @@ class Atlas(fslimage.Image):
 
         self.desc = atlasDesc
 
-        
+
 class LabelAtlas(Atlas):
     """A 3D atlas which contains integer labels for each region.
 
@@ -593,7 +593,7 @@ class LabelAtlas(Atlas):
         """
         Atlas.__init__(self, atlasDesc, resolution, True)
 
-        
+
     def label(self, worldLoc):
         """Looks up and returns the label of the region at the given world
         location, or ``None`` if the location is out of bounds.
@@ -609,16 +609,16 @@ class LabelAtlas(Atlas):
            voxelLoc[1] >= self.shape[1] or \
            voxelLoc[2] >= self.shape[2]:
             return None
-        
+
         val = self[voxelLoc[0], voxelLoc[1], voxelLoc[2]]
 
         if self.desc.atlasType == 'label':
             return val
-        
+
         elif self.desc.atlasType == 'probabilistic':
             return val - 1
 
-    
+
 class ProbabilisticAtlas(Atlas):
     """A 4D atlas which contains one volume for each region.
 
@@ -633,10 +633,10 @@ class ProbabilisticAtlas(Atlas):
                          the atlas.
 
         :arg resolution: Desired isotropic resolution in millimetres.
-        """ 
+        """
         Atlas.__init__(self, atlasDesc, resolution, False)
 
-        
+
     def proportions(self, worldLoc):
         """Looks up the region probabilities for the given location.
 
@@ -657,7 +657,7 @@ class ProbabilisticAtlas(Atlas):
            voxelLoc[1] >= self.shape[1] or \
            voxelLoc[2] >= self.shape[2]:
             return []
-        
+
         return self[voxelLoc[0], voxelLoc[1], voxelLoc[2], :]
 
 
diff --git a/fsl/data/dtifit.py b/fsl/data/dtifit.py
index eafc123cc..a2c36fb33 100644
--- a/fsl/data/dtifit.py
+++ b/fsl/data/dtifit.py
@@ -5,7 +5,7 @@
 # Author: Paul McCarthy <pauldmccarthy@gmail.com>
 #
 """This module provides the :class:`.DTIFitTensor` class, which encapsulates
-the diffusion tensor data generated by the FSL ``dtifit`` tool. 
+the diffusion tensor data generated by the FSL ``dtifit`` tool.
 
 The following utility functions are also defined:
 
@@ -34,11 +34,11 @@ log = logging.getLogger(__name__)
 
 
 def getDTIFitDataPrefix(path):
-    """Returns the prefix (a.k,a, base name) used for the ``dtifit`` file 
-    names in the given directory, or ``None`` if the ``dtifit`` files could 
+    """Returns the prefix (a.k,a, base name) used for the ``dtifit`` file
+    names in the given directory, or ``None`` if the ``dtifit`` files could
     not be identified.
     """
-    
+
     v1s   = glob.glob(op.join(path, '*_V1.*'))
     v2s   = glob.glob(op.join(path, '*_V2.*'))
     v3s   = glob.glob(op.join(path, '*_V3.*'))
@@ -58,8 +58,8 @@ def getDTIFitDataPrefix(path):
 
         if prefix not in prefixes: prefixes[prefix] = [f]
         else:                      prefixes[prefix].append(f)
-                
-    # Discard any prefixes which are 
+
+    # Discard any prefixes which are
     # not present for every file type.
     for prefix, files in list(prefixes.items()):
         if len(files) != 6:
@@ -82,7 +82,7 @@ def getDTIFitDataPrefix(path):
 
     # If there's more than one remaining
     # prefix, I don't know what to do -
-    # just return the first one. 
+    # just return the first one.
     if len(prefixes) > 1:
         log.warning('Multiple dtifit prefixes detected: {}'.format(prefixes))
 
@@ -93,12 +93,12 @@ def isDTIFitPath(path):
     """Returns ``True`` if the given directory path looks like it contains
     ``dtifit`` data, ``False`` otherwise.
     """
-    
+
     return getDTIFitDataPrefix(path) is not None
 
 
 def looksLikeTensorImage(image):
-    """Returns ``True`` if the given :class:`.Image` looks like it could 
+    """Returns ``True`` if the given :class:`.Image` looks like it could
     contain tensor matrix data, ``False`` otherwise.
     """
 
@@ -114,7 +114,8 @@ def decomposeTensorMatrix(data):
                 the unique elements of diffusion tensor matrices at
                 every voxel.
 
-    :returns:   
+    :returns:   A tuple containing the principal eigenvectors and
+                eigenvalues of the tensor matrix.
     """
 
     # The image contains 6 volumes, corresponding
@@ -141,13 +142,13 @@ def decomposeTensorMatrix(data):
     matrices[:, 2, 1] = data[..., 4].flat
     matrices[:, 2, 2] = data[..., 5].flat
 
-    # Calculate the eigenvectors and 
+    # Calculate the eigenvectors and
     # values on all of those matrices
     vals, vecs = npla.eig(matrices)
     vecShape   = list(shape) + [3]
 
-    # Grr, np.linalg.eig does not 
-    # sort the eigenvalues/vectors, 
+    # Grr, np.linalg.eig does not
+    # sort the eigenvalues/vectors,
     # so we have to do it ourselves.
     order = vals.argsort(axis=1)
     i     = np.arange(nvoxels)[:, np.newaxis]
@@ -173,7 +174,7 @@ class DTIFitTensor(fslimage.Nifti):
     separate NIFTI images.
     """
 
-    
+
     def __init__(self, path):
         """Create a ``DTIFitTensor``.
 
@@ -202,9 +203,15 @@ class DTIFitTensor(fslimage.Nifti):
         self.dataSource = op.abspath(path)
         self.name       = '{}'.format(op.basename(path))
 
-    def V1(self): return self.__v1
-    def V2(self): return self.__v2
-    def V3(self): return self.__v3
-    def L1(self): return self.__l1
-    def L2(self): return self.__l2
-    def L3(self): return self.__l3
+    def V1(self):
+        return self.__v1
+    def V2(self):
+        return self.__v2
+    def V3(self):
+        return self.__v3
+    def L1(self):
+        return self.__l1
+    def L2(self):
+        return self.__l2
+    def L3(self):
+        return self.__l3
diff --git a/fsl/data/featanalysis.py b/fsl/data/featanalysis.py
index c3f706242..a35d81b54 100644
--- a/fsl/data/featanalysis.py
+++ b/fsl/data/featanalysis.py
@@ -51,7 +51,7 @@ import fsl.utils.path      as fslpath
 import fsl.utils.transform as transform
 
 from . import image as fslimage
-from . import          featdesign 
+from . import          featdesign
 
 
 log = logging.getLogger(__name__)
@@ -70,7 +70,7 @@ def isFEATImage(path):
     dirname  = op.dirname( path)
     filename = op.basename(path)
 
-    return filename.startswith('filtered_func_data') and isFEATDir(dirname) 
+    return filename.startswith('filtered_func_data') and isFEATDir(dirname)
 
 
 def isFEATDir(path):
@@ -88,10 +88,10 @@ def isFEATDir(path):
     """
 
     path = op.abspath(path)
-    
+
     if op.isdir(path): dirname = path
     else:              dirname = op.dirname(path)
-    
+
     if not dirname.endswith('.feat'):
         return False
 
@@ -99,11 +99,11 @@ def isFEATDir(path):
         fslimage.addExt(op.join(dirname, 'filtered_func_data'), mustExist=True)
     except fslimage.PathError:
         return False
-    
+
     if not op.exists(op.join(dirname, 'design.fsf')): return False
     if not op.exists(op.join(dirname, 'design.mat')): return False
     if not op.exists(op.join(dirname, 'design.con')): return False
-        
+
     return True
 
 
@@ -117,7 +117,7 @@ def hasStats(featdir):
         return True
     except:
         return False
-        
+
 
 def hasMelodicDir(featdir):
     """Returns ``True`` if the data for the given FEAT directory has had
@@ -150,7 +150,7 @@ def getReportFile(featdir):
     """Returns the path to the FEAT report index file, or ``None`` if there
     is no report.
     """
-    
+
     report = op.join(featdir, 'report.html')
     if op.exists(report): return report
     else:                 return None
@@ -158,9 +158,9 @@ def getReportFile(featdir):
 
 def loadContrasts(featdir):
     """Loads the contrasts from a FEAT directory. Returns a tuple containing:
-    
+
       - A list of names, one for each contrast.
-    
+
       - A list of contrast vectors (each of which is a list itself).
 
     :arg featdir: A FEAT directory.
@@ -172,7 +172,7 @@ def loadContrasts(featdir):
     designcon    = op.join(featdir, 'design.con')
 
     log.debug('Loading FEAT contrasts from {}'.format(designcon))
-    
+
     with open(designcon, 'rt') as f:
 
         while True:
@@ -183,7 +183,7 @@ def loadContrasts(featdir):
                 num        = [c for c in tkns[0] if c.isdigit()]
                 num        = int(''.join(num))
 
-                # The /ContrastName field may not 
+                # The /ContrastName field may not
                 # actually have a name specified
                 if len(tkns) > 1:
                     name       = tkns[1].strip()
@@ -248,7 +248,7 @@ def loadSettings(featdir):
                 key = key[5:-1]
 
             settings[key] = val
-    
+
     return settings
 
 
@@ -259,7 +259,7 @@ def loadDesign(featdir, settings):
 
     :arg settings: Dictionary containing FEAT settings (see
                    :func:`loadSettings`).
-    
+
     :returns:      a :class:`.FEATFSFDesign` instance which represents the
                    design matrix.
     """
@@ -273,7 +273,7 @@ def getThresholds(settings):
 
     The following keys will be present. Threshold values will be ``None``
     if the respective statistical thresholding was not carried out:
-    
+
       - ``p``: P-value thresholding
       - ``z``: Z-statistic thresholding
 
@@ -378,14 +378,14 @@ def loadClusterResults(featdir, settings, contrast):
     class Cluster(object):
         def __init__(self, **kwargs):
             for name, val in kwargs.items():
-                
+
                 attrName = colmap[name]
                 if val is not None:
                     val = float(val)
-                    
+
                 setattr(self, attrName, val)
 
-    # This dict provides a mapping between 
+    # This dict provides a mapping between
     # Cluster object attribute names, and
     # the corresponding column name in the
     # cluster.txt file.
@@ -484,7 +484,7 @@ def getDataFile(featdir):
 
 
 def getMelodicFile(featdir):
-    """Returns the name of the file in the FEAT results which contains the 
+    """Returns the name of the file in the FEAT results which contains the
     melodic components (if melodic ICA was performed as part of the FEAT
     analysis). This file can be loaded as a :class:`.MelodicImage`.
 
@@ -505,7 +505,7 @@ def getResidualFile(featdir):
     resfile = op.join(featdir, 'stats', 'res4d')
     return fslimage.addExt(resfile, mustExist=True)
 
-    
+
 def getPEFile(featdir, ev):
     """Returns the path of the PE file for the specified EV.
 
@@ -524,7 +524,7 @@ def getCOPEFile(featdir, contrast):
     Raises a :exc:`~fsl.utils.path.PathError` if the file does not exist.
 
     :arg featdir:  A FEAT directory.
-    :arg contrast: The contrast number (0-indexed). 
+    :arg contrast: The contrast number (0-indexed).
     """
     copefile = op.join(featdir, 'stats', 'cope{}'.format(contrast + 1))
     return fslimage.addExt(copefile, mustExist=True)
@@ -536,7 +536,7 @@ def getZStatFile(featdir, contrast):
     Raises a :exc:`~fsl.utils.path.PathError` if the file does not exist.
 
     :arg featdir:  A FEAT directory.
-    :arg contrast: The contrast number (0-indexed). 
+    :arg contrast: The contrast number (0-indexed).
     """
     zfile = op.join(featdir, 'stats', 'zstat{}'.format(contrast + 1))
     return fslimage.addExt(zfile, mustExist=True)
@@ -548,7 +548,7 @@ def getClusterMaskFile(featdir, contrast):
     Raises a :exc:`~fsl.utils.path.PathError` if the file does not exist.
 
     :arg featdir:  A FEAT directory.
-    :arg contrast: The contrast number (0-indexed). 
+    :arg contrast: The contrast number (0-indexed).
     """
     mfile = op.join(featdir, 'cluster_mask_zstat{}'.format(contrast + 1))
     return fslimage.addExt(mfile, mustExist=True)
diff --git a/fsl/data/featdesign.py b/fsl/data/featdesign.py
index 0a3de55e6..ee5843064 100644
--- a/fsl/data/featdesign.py
+++ b/fsl/data/featdesign.py
@@ -129,16 +129,16 @@ class FEATFSFDesign(object):
     analysis. This class is intended to be used for FEAT analyses generated
     with FSL 5.0.9 and older.
     """
-    
+
     def __init__(self, featDir, settings=None, loadVoxelwiseEVs=True):
         """Create a ``FEATFSFDesign``.
 
         :arg featDir:          Path to the FEAT directory.
 
-        :arg settings:         A dictionary containing the FEAT analysis 
-                               settings from its ``design.fsf``. If not 
-                               provided, is loaded via 
-                               :func:`.featanalysis.loadSettings`. 
+        :arg settings:         A dictionary containing the FEAT analysis
+                               settings from its ``design.fsf``. If not
+                               provided, is loaded via
+                               :func:`.featanalysis.loadSettings`.
 
         :arg loadVoxelwiseEVs: If ``True`` (the default), image files
                                for all voxelwise EVs are loaded. Otherwise
@@ -157,7 +157,7 @@ class FEATFSFDesign(object):
         version      = float(settings['version'])
         level        = int(  settings['level'])
 
-        # Print a warning if we're 
+        # Print a warning if we're
         # using an old version of FEAT
         if version < 6:
             log.warning('Unsupported FEAT version: {}'.format(version))
@@ -181,14 +181,14 @@ class FEATFSFDesign(object):
         # so they're ready to be used by
         # the getDesign method.
         for ev in self.__evs:
-            
+
             if not isinstance(ev, (VoxelwiseEV, VoxelwiseConfoundEV)):
                 continue
 
-            ev.image = None 
+            ev.image = None
 
-            # The path to some voxelwise 
-            # EVs may not be present - 
+            # The path to some voxelwise
+            # EVs may not be present -
             # see the VoxelwisEV class.
             if loadVoxelwiseEVs and (ev.filename is not None):
                 ev.image = fslimage.Image(ev.filename)
@@ -199,8 +199,8 @@ class FEATFSFDesign(object):
         each column of this ``FEATFSFDesign``.
         """
         return list(self.__evs)
- 
-    
+
+
     def getDesign(self, voxel=None):
         """Returns the design matrix for the specified voxel.
 
@@ -254,7 +254,7 @@ class EV(object):
         self.index = index
         self.title = title
 
-        
+
 class NormalEV(EV):
     """Class representing a *normal* EV in a FEAT design matrix, i.e. one
     which has been explicitly provided by the user.
@@ -284,7 +284,7 @@ class TemporalDerivativeEV(NormalEV):
     """Class representing a temporal derivative EV, derived from a normal EV.
     """
     pass
-                          
+
 
 class BasisFunctionEV(NormalEV):
     """Class representing a basis function EV, derived from a normal EV. """
@@ -339,7 +339,7 @@ class ConfoundEV(EV):
     ============= ==========================================================
     ``confIndex`` Index of this ``ConfoundEV`` (starting from 0) in relation
                   to all other confound EVs.
-    ============= ========================================================== 
+    ============= ==========================================================
     """
     def __init__(self, index, confIndex, title):
         """Create a ``ConfoundEV``.
@@ -352,7 +352,7 @@ class ConfoundEV(EV):
         """
         EV.__init__(self, index, title)
         self.confIndex = confIndex
- 
+
 
 class MotionParameterEV(EV):
     """Class representing a motion parameter EV.
@@ -361,10 +361,10 @@ class MotionParameterEV(EV):
     addition to the :class:`EV` attributes):
 
     =============== ========================================================
-    ``motionIndex`` Index of this ``MotionParameterEV`` (starting from 0) in 
+    ``motionIndex`` Index of this ``MotionParameterEV`` (starting from 0) in
                     relation to all other motion parameter EVs.
     =============== ========================================================
-    """ 
+    """
     def __init__(self, index, motionIndex, title):
         """Create a ``MotionParameterEV``.
 
@@ -373,33 +373,33 @@ class MotionParameterEV(EV):
         :arg confIndex: Index (starting from 0) of this ``MotionParameterEV``
                         in relation to all other motion parameter EVs.
         :arg title:     Name of this ``MotionParameterEV``.
-        """ 
+        """
         EV.__init__(self, index, title)
-        self.motionIndex = motionIndex 
+        self.motionIndex = motionIndex
 
 
 class VoxelwiseConfoundEV(EV):
     """Class representing a voxelwise confound EV.
 
-    ``VoxelwiseConfoundEV`` instances contain the following attributes (in 
+    ``VoxelwiseConfoundEV`` instances contain the following attributes (in
     addition to the :class:`EV` attributes):
 
     ============ ==========================================================
-    ``voxIndex`` Index of this ``VoxelwiseConfoundEV`` (starting from 0) in 
+    ``voxIndex`` Index of this ``VoxelwiseConfoundEV`` (starting from 0) in
                  relation to all other voxelwise confound EVs.
     ``filename`` Path to the image file containing the data for this EV
-    ============ ========================================================== 
-    """ 
+    ============ ==========================================================
+    """
     def __init__(self, index, voxIndex, title, filename):
         """Create a ``Voxelwise ConfoundEV``.
 
-        :arg index:     Index (starting from 0) of this 
+        :arg index:     Index (starting from 0) of this
                         ``VoxelwiseConfoundEV`` in the design matrix.
-        :arg confIndex: Index (starting from 0) of this 
+        :arg confIndex: Index (starting from 0) of this
                         ``VoxelwiseConfoundEV`` in relation to all other
                         voxelwise confound EVs.
         :arg title:     Name of this ``VoxelwiseConfoundEV``.
-        """        
+        """
         EV.__init__(self, index, title)
         self.voxIndex = voxIndex
 
@@ -408,8 +408,8 @@ class VoxelwiseConfoundEV(EV):
         else:
             log.warning('Voxelwise confound EV file '
                         'does not exist: '.format(filename))
-            self.filename = None 
-    
+            self.filename = None
+
 
 def getFirstLevelEVs(featDir, settings, designMat):
     """Derives the EVs for the given first level FEAT analysis.
@@ -423,20 +423,20 @@ def getFirstLevelEVs(featDir, settings, designMat):
 
     :returns: A list of :class:`EV` instances, one for each column in the
               design matrix.
-    """ 
+    """
 
     evs     = []
     origEVs = int(settings['evs_orig'])
 
     # First, we loop through the EVs that
     # are explicitly defined in design.fsf.
-    # This includes 
+    # This includes
     #   - normal EVs
     #   - temporal derivative EVs
     #   - basis function EVs
     #   - voxelwise EVs
     for origIdx in range(origEVs):
-            
+
         title    = settings[        'evtitle{}'  .format(origIdx + 1)]
         shape    = int(settings[    'shape{}'    .format(origIdx + 1)])
         convolve = int(settings[    'convolve{}' .format(origIdx + 1)])
@@ -445,7 +445,7 @@ def getFirstLevelEVs(featDir, settings, designMat):
 
         # Normal EV. This is just a column
         # in the design matrix, defined by
-        # the user. 
+        # the user.
         if shape != 9:
             evs.append(NormalEV(len(evs), origIdx, title))
 
@@ -463,8 +463,8 @@ def getFirstLevelEVs(featDir, settings, designMat):
             filename = op.join(
                 featDir, 'designVoxelwiseEV{}'.format(origIdx + 1))
             filename = fslimage.addExt(filename, True)
-            
-            evs.append(VoxelwiseEV(len(evs), origIdx, title, filename)) 
+
+            evs.append(VoxelwiseEV(len(evs), origIdx, title, filename))
 
         # This EV has been convolved with a set of basis
         # functions. A set of N additional EVs have been
@@ -491,7 +491,7 @@ def getFirstLevelEVs(featDir, settings, designMat):
     # have been explicilty defined, the rest of
     # the EVs in the design matrix are confounds,
     # in the following order:
-    # 
+    #
     #   1. Voxelwise confounds
     #   2. Motion parameters
     #   3. Other confounds
@@ -513,7 +513,7 @@ def getFirstLevelEVs(featDir, settings, designMat):
     # order defined in vef.dat.
     voxConfFiles = op.join(featDir, 'vef.dat')
     voxConfLocs  = op.join(featDir, 'ven.dat')
-    
+
     if op.exists(voxConfFiles) and op.exists(voxConfLocs):
 
         with open(voxConfFiles, 'rt') as vcff:
@@ -554,14 +554,14 @@ def getFirstLevelEVs(featDir, settings, designMat):
     # Have motion parameters been added
     # as regressors to the design matrix?
     motion = int(settings['motionevs'])
-    
+
     if   motion == 1: numMotionEVs = 6
     elif motion == 2: numMotionEVs = 24
-    else:             numMotionEVs = 0 
+    else:             numMotionEVs = 0
 
     for i in range(numMotionEVs):
         evs.append(MotionParameterEV(len(evs), i, 'motion'))
-        
+
     # Last step - any columns in the design
     # matrix which have not yet been accounted
     # for are other confounds, specified by
@@ -585,7 +585,7 @@ def getHigherLevelEVs(featDir, settings, designMat):
                     :func:`loadDesignMat`).
 
     :returns: A list of :class:`EV` instances, one for each column in the
-              design matrix. 
+              design matrix.
     """
 
     # TODO Maybe I can give the voxel EVs titles based on their
@@ -598,7 +598,7 @@ def getHigherLevelEVs(featDir, settings, designMat):
     #
     #   - Normal EVs
     #   - Voxelwise EVs
-    # 
+    #
     # evs_orig is the number of normal EVs
     # evs_vox is the number of voxelwise EVs
     # evs_real is the total number of EVs
@@ -623,11 +623,11 @@ def getHigherLevelEVs(featDir, settings, designMat):
     # for each voxelwise EV from its file
     # name.
     for origIdx in range(voxEVs):
-        
+
         filename = settings['evs_vox_{}'.format(origIdx + 1)]
         title    = op.basename(fslimage.removeExt(filename))
         evs.append(VoxelwiseEV(len(evs), origIdx, title, filename))
-        
+
     return evs
 
 
@@ -640,7 +640,7 @@ def loadDesignMat(designmat):
     :arg designmat: Path to the ``design.mat`` file.
     """
 
-    matrix = None 
+    matrix = None
 
     log.debug('Loading FEAT design matrix from {}'.format(designmat))
 
diff --git a/fsl/data/featimage.py b/fsl/data/featimage.py
index e8e95b6f8..0b37540ba 100644
--- a/fsl/data/featimage.py
+++ b/fsl/data/featimage.py
@@ -48,7 +48,7 @@ class FEATImage(fslimage.Image):
         # is a contrast vector).
         img.fit([1, 1, 1, 1], [23, 30, 42], fullModel=True)
     """
-    
+
 
     def __init__(self, path, **kwargs):
         """Create a ``FEATImage`` instance.
@@ -75,7 +75,7 @@ class FEATImage(fslimage.Image):
         else:
             design      = None
             names, cons = [], []
-        
+
         fslimage.Image.__init__(self, path, **kwargs)
 
         self.__analysisName  = op.splitext(op.basename(featDir))[0]
@@ -126,7 +126,7 @@ class FEATImage(fslimage.Image):
         """Returns the path to the FEAT report - see
         :func:`.featanalysis.getReportFile`.
         """
-        return featanalysis.getReportFile(self.__featDir) 
+        return featanalysis.getReportFile(self.__featDir)
 
 
     def hasStats(self):
@@ -134,30 +134,30 @@ class FEATImage(fslimage.Image):
         a statistical analysis.
         """
         return self.__design is not None
-        
+
 
     def getDesign(self, voxel=None):
         """Returns the analysis design matrix as a :mod:`numpy` array
         with shape :math:`numPoints\\times numEVs`.
         See :meth:`.FEATFSFDesign.getDesign`.
         """
-        
+
         if self.__design is None:
             return None
-        
+
         return self.__design.getDesign(voxel)
-        
-    
+
+
     def numPoints(self):
         """Returns the number of points (e.g. time points, number of
         subjects, etc) in the analysis.
         """
         if self.__design is None:
             return 0
-        
+
         return self.__design.getDesign().shape[0]
 
-    
+
     def numEVs(self):
         """Returns the number of explanatory variables (EVs) in the analysis.
         """
@@ -169,18 +169,18 @@ class FEATImage(fslimage.Image):
 
     def evNames(self):
         """Returns a list containing the name of each EV in the analysis."""
-        
+
         if self.__design is None:
             return []
-        
+
         return [ev.title for ev in self.__design.getEVs()]
 
-    
+
     def numContrasts(self):
         """Returns the number of contrasts in the analysis."""
         return len(self.__contrasts)
 
-    
+
     def contrastNames(self):
         """Returns a list containing the name of each contrast in the analysis.
         """
@@ -231,19 +231,19 @@ class FEATImage(fslimage.Image):
 
     def getResiduals(self):
         """Returns the residuals of the full model fit. """
-        
+
         if self.__residuals is None:
             resfile = featanalysis.getResidualFile(self.__featDir)
             self.__residuals = fslimage.Image(
                 resfile,
                 name='{}: residuals'.format(self.__analysisName))
-        
+
         return self.__residuals
 
-    
+
     def getCOPE(self, con):
         """Returns the COPE image for the given contrast (0-indexed). """
-        
+
         if self.__copes[con] is None:
             copefile = featanalysis.getPEFile(self.__featDir, con)
             self.__copes[con] = fslimage.Image(
@@ -259,7 +259,7 @@ class FEATImage(fslimage.Image):
     def getZStats(self, con):
         """Returns the Z statistic image for the given contrast (0-indexed).
         """
-        
+
         if self.__zstats[con] is None:
             zfile = featanalysis.getZStatFile(self.__featDir, con)
 
@@ -270,13 +270,13 @@ class FEATImage(fslimage.Image):
                     con + 1,
                     self.contrastNames()[con]))
 
-        return self.__zstats[con] 
+        return self.__zstats[con]
 
 
     def getClusterMask(self, con):
         """Returns the cluster mask image for the given contrast (0-indexed).
         """
-        
+
         if self.__clustMasks[con] is None:
             mfile = featanalysis.getClusterMaskFile(self.__featDir, con)
 
@@ -287,8 +287,8 @@ class FEATImage(fslimage.Image):
                     con + 1,
                     self.contrastNames()[con]))
 
-        return self.__clustMasks[con] 
-            
+        return self.__clustMasks[con]
+
 
     def fit(self, contrast, xyz):
         """Calculates the model fit for the given contrast vector
@@ -351,9 +351,9 @@ def modelFit(data, design, contrast, pes, firstLevel=True):
     :returns: The best fit of the model to the data.
     """
 
-    # Here we are basically trying to 
+    # Here we are basically trying to
     # replicate the behaviour of tsplot.
-    # There are some differences though - 
+    # There are some differences though -
     # by default, tsplot weights the
     # data by Z statistics. We're not
     # doing that here.
@@ -383,7 +383,7 @@ def modelFit(data, design, contrast, pes, firstLevel=True):
 
         ev        = design[:, i]
         pe        = pes[i]
-        modelfit += ev * pe * contrast[i] 
+        modelfit += ev * pe * contrast[i]
 
     # Make sure the model fit has an
     # appropriate mean.  The data in
diff --git a/fsl/data/fixlabels.py b/fsl/data/fixlabels.py
index 5d01ae9b5..d5814d79b 100644
--- a/fsl/data/fixlabels.py
+++ b/fsl/data/fixlabels.py
@@ -24,7 +24,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
     to be of the format generated by FIX, Melview or ICA-AROMA; such a file
     should have a structure resembling the following::
 
-    
+
         filtered_func_data.ica
         1, Signal, False
         2, Unclassified Noise, True
@@ -53,7 +53,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
               allow you to control the labels assigned to included/excluded
               components.
 
-    
+
     The first line of the file contains the name of the melodic directory.
     Then, one line is present for each component, containing the following,
     separated by commas:
@@ -65,7 +65,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
         ``'False'`` otherwise. This field is optional - if the last
         comma-separated token on a line is not equal (case-insensitive)
         to ``True`` or ``False``, it is interpreted as a component label.
-    
+
     The last line of the file contains the index (starting from 1) of all
     *bad* components, i.e. those components which are not classified as
     signal or unknown.
@@ -77,7 +77,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
                        components in the list. Defaults to 'Unclassified
                        noise' for FIX-like files, and 'Movement' for
                        ICA-AROMA-like files.
-    
+
     :arg excludeLabel: If the file contains a single line containing component
                        indices, this label will be used for the components
                        that are not in the list.  Defaults to 'Signal' for
@@ -109,14 +109,14 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
     if len(lines) == 1:
 
         line = lines[0]
-            
+
         # if the list is contained in
         # square brackets, we assume
         # that it is a FIX output file,
         # where included components have
         # been classified as noise, and
         # excluded components as signal.
-        # 
+        #
         # Otherwise we assume that it
         # is an AROMA file, where
         # included components have
@@ -126,7 +126,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
         if includeLabel is None:
             if line[0] == '[': includeLabel = 'Unclassified noise'
             else:              includeLabel = 'Movement'
-                
+
         if excludeLabel is None:
             if line[0] == '[': excludeLabel = 'Signal'
             else:              excludeLabel = 'Unknown'
@@ -137,7 +137,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
         # whitespace or brackets.
         line = lines[0].strip(' []')
 
-        melDir     = None 
+        melDir     = None
         noisyComps = [int(i) for i in line.split(',')]
         allLabels  = []
 
@@ -153,14 +153,14 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
         noisyComps = lines[-1].strip(' []').split(',')
         noisyComps = [c      for c in noisyComps if c != '']
         noisyComps = [int(c) for c in noisyComps]
-        
+
         # The melodic directory path should
         # either be an absolute path, or
         # be specified relative to the location
         # of the label file.
         if not op.isabs(melDir):
             melDir = op.join(op.dirname(filename), melDir)
-        
+
         # Parse the labels for every component
         allLabels = []
         for i, compLine in enumerate(lines[1:-1]):
@@ -175,7 +175,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
 
             try:
                 compIdx = int(tokens[0])
-                
+
             except:
                 raise InvalidLabelFileError(
                     'Invalid FIX classification file - '
@@ -208,14 +208,14 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
                                         'labels: {}'.format(comp, labels))
 
     for comp in noisyComps:
-        
+
         i      = comp - 1
         labels = allLabels[i]
         noise  = isNoisyComponent(labels, signalLabels)
-        
+
         if not noise:
             raise InvalidLabelFileError('Noisy component {} is missing '
-                                        'a noise label'.format(comp)) 
+                                        'a noise label'.format(comp))
 
     return melDir, allLabels
 
@@ -247,7 +247,7 @@ def saveLabelFile(allLabels,
     :arg signalLabels: Labels which should be deemed 'signal' - see the
                        :func:`isNoisyComponent` function.
     """
-    
+
     lines      = []
     noisyComps = []
 
@@ -291,13 +291,13 @@ def isNoisyComponent(labels, signalLabels=None):
     """
     if signalLabels is None:
         signalLabels = ['signal', 'unknown']
-        
+
     signalLabels = [l.lower() for l in signalLabels]
     labels       = [l.lower() for l in labels]
     noise        = not any([sl in labels for sl in signalLabels])
 
     return noise
-    
+
 
 class InvalidLabelFileError(Exception):
     """Exception raised by the :func:`loadLabelFile` function when an attempt
diff --git a/fsl/data/gifti.py b/fsl/data/gifti.py
index 8745415d7..c27054ab8 100644
--- a/fsl/data/gifti.py
+++ b/fsl/data/gifti.py
@@ -84,7 +84,7 @@ class GiftiSurface(mesh.TriangleMesh):
                 vertexData = loadGiftiVertexData(dataSource)[1]
             else:
                 vertexData = None
-            
+
         return mesh.TriangleMesh.loadVertexData(self, dataSource, vertexData)
 
 
@@ -118,8 +118,8 @@ def loadGiftiSurface(filename):
 
                    - A :math:`N\\times 3` ``numpy`` array containing :math:`N`
                      vertices.
-    
-                   - A :math:`M\\times 3` ``numpy`` array containing the 
+
+                   - A :math:`M\\times 3` ``numpy`` array containing the
                      vertex indices for :math:`M` triangles.
     """
 
@@ -134,11 +134,11 @@ def loadGiftiSurface(filename):
     if len(gimg.darrays) != 2:
         raise ValueError('GIFTI surface files must contain exactly '
                          'one pointset array and one triangle array')
-    
+
     if len(pointsets) != 1:
         raise ValueError('GIFTI surface files must contain '
                          'exactly one pointset array')
-    
+
     if len(triangles) != 1:
         raise ValueError('GIFTI surface files must contain '
                          'exactly one triangle array')
@@ -155,7 +155,7 @@ def loadGiftiVertexData(filename):
     It is assumed that the given file does not contain any
     ``NIFTI_INTENT_POINTSET`` or ``NIFTI_INTENT_TRIANGLE`` data arrays, and
     which contains either:
-    
+
       - One ``(M, N)`` data array containing ``N`` data points for ``M``
         vertices
 
@@ -163,9 +163,9 @@ def loadGiftiVertexData(filename):
         for ``M`` vertices, and all with the same intent code
 
     Returns a tuple containing:
-    
+
       - The loaded ``nibabel.gifti.GiftiImage`` object
-    
+
       - A ``(M, N)`` numpy array containing ``N`` data points for ``M``
         vertices
     """
@@ -182,7 +182,7 @@ def loadGiftiVertexData(filename):
 
     if intent in (constants.NIFTI_INTENT_POINTSET,
                   constants.NIFTI_INTENT_TRIANGLE):
-        
+
         raise ValueError('{} contains surface data'.format(filename))
 
     # Just a single array - return it as-is.
@@ -199,7 +199,7 @@ def loadGiftiVertexData(filename):
     if any([len(d.shape) != 1 for d in vdata]):
         raise ValueError('{} contains one or more non-vector '
                          'darrays'.format(filename))
-    
+
     return gimg, np.vstack(vdata).T
 
 
@@ -212,7 +212,7 @@ def relatedFiles(fname):
     # We want to return all files in the same
     # directory which have the following name:
 
-    # 
+    #
     # [prefix].*.[type].gii
     #
     #   where
diff --git a/fsl/data/image.py b/fsl/data/image.py
index 44f441529..1b93982be 100644
--- a/fsl/data/image.py
+++ b/fsl/data/image.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# 
+#
 # image.py - Provides the :class:`Image` class, for representing 3D/4D NIFTI
 #            images.
 #
@@ -23,7 +23,7 @@ and file names:
    :nosignatures:
 
    looksLikeImage
-   addExt 
+   addExt
    splitExt
    getExt
    removeExt
@@ -36,7 +36,7 @@ import                      os
 import os.path           as op
 import                      logging
 
-import                      six 
+import                      six
 import numpy             as np
 
 import nibabel           as nib
@@ -64,25 +64,25 @@ class Nifti(notifier.Notifier):
     When a ``Nifti`` instance is created, it adds the following attributes
     to itself:
 
-    
+
     ================= ====================================================
     ``header``        The :mod:`nibabel` NIFTI1/NIFTI2/Analyze header
                       object.
-    
+
     ``shape``         A list/tuple containing the number of voxels along
                       each image dimension.
-    
-    ``pixdim``        A list/tuple containing the length of one voxel 
+
+    ``pixdim``        A list/tuple containing the length of one voxel
                       along each image dimension.
-    
+
     ``voxToWorldMat`` A 4*4 array specifying the affine transformation
                       for transforming voxel coordinates into real world
                       coordinates.
-    
+
     ``worldToVoxMat`` A 4*4 array specifying the affine transformation
                       for transforming real world coordinates into voxel
                       coordinates.
-    
+
     ``intent``        The NIFTI intent code specified in the header (or
                       :attr:`.constants.NIFTI_INTENT_NONE` for Analyze
                       images).
@@ -94,28 +94,28 @@ class Nifti(notifier.Notifier):
     writing code that should work with all three. Use the :meth:`niftiVersion`
     property if you need to know what type of image you are dealing with.
 
-    
+
     The ``shape`` attribute may not precisely match the image shape as
     reported in the NIFTI header, because trailing dimensions of size 1 are
     squeezed out. See the :meth:`__determineShape` and :meth:`mapIndices`
     methods.
 
-    
+
     **The affine transformation**
 
-    
+
     The :meth:`voxToWorldMat` and :meth:`worldToVoxMat` attributes contain
     transformation matrices for transforming between voxel and world
     coordinates. The ``Nifti`` class follows the same process as ``nibabel``
     in selecting the affine (see
     http://nipy.org/nibabel/nifti_images.html#the-nifti-affines):
 
-    
+
      1. If ``sform_code != 0`` ("unknown") use the sform affine; else
      2. If ``qform_code != 0`` ("unknown") use the qform affine; else
      3. Use the fall-back affine.
 
-    
+
     However, the *fall-back* affine used by the ``Nifti`` class differs to
     that used by ``nibabel``. In ``nibabel``, the origin (world coordinates
     (0, 0, 0)) is set to the centre of the image. Here in the ``Nifti``
@@ -127,13 +127,13 @@ class Nifti(notifier.Notifier):
     is an Analyze image). When you do so:
 
      - Only the ``sform`` of the underlying ``Nifti1Header`` object is changed
-    
+
      - The ``qform`` is not modified.
-    
+
      - If the ``sform_code`` was previously set to ``NIFTI_XFORM_UNKNOWN``,
        it is changed to ``NIFTI_XFORM_ALIGNED_ANAT``. Otherwise, the
        ``sform_code`` is not modified.
-    
+
 
     **ANALYZE support**
 
@@ -146,7 +146,7 @@ class Nifti(notifier.Notifier):
 
       - The affine will be set to a diagonal matrix with the header pixdims as
         its elements (with the X pixdim negated), and an offset specified by
-        the ANALYZE ``origin`` fields. Construction of the affine is handled 
+        the ANALYZE ``origin`` fields. Construction of the affine is handled
         by ``nibabel``.
 
       - The :meth:`niftiVersion` method will return ``0``.
@@ -158,7 +158,7 @@ class Nifti(notifier.Notifier):
     **Notification**
 
 
-    The ``Nifti`` class implements the :class:`.Notifier` interface - 
+    The ``Nifti`` class implements the :class:`.Notifier` interface -
     listeners may register to be notified on the following topics:
 
     =============== ========================================================
@@ -167,14 +167,14 @@ class Nifti(notifier.Notifier):
     =============== ========================================================
     """
 
-    
+
     def __init__(self, header):
         """Create a ``Nifti`` object.
 
-        :arg header: A :class:`nibabel.nifti1.Nifti1Header`, 
+        :arg header: A :class:`nibabel.nifti1.Nifti1Header`,
                        :class:`nibabel.nifti2.Nifti2Header`, or
                        ``nibabel.analyze.AnalyzeHeader`` to be used as the
-                       image header. 
+                       image header.
         """
 
         # Nifti2Header is a sub-class of Nifti1Header,
@@ -201,7 +201,7 @@ class Nifti(notifier.Notifier):
         self.__voxToWorldMat = voxToWorldMat
         self.__worldToVoxMat = worldToVoxMat
 
-    
+
     def __determineTransform(self, header):
         """Called by :meth:`__init__`. Figures out the voxel-to-world
         coordinate transformation matrix that is associated with this
@@ -242,7 +242,7 @@ class Nifti(notifier.Notifier):
         # then we can't assume that the transform
         # matrices are valid. So we fall back to a
         # pixdim scaling matrix.
-        # 
+        #
         # n.b. For images like this, nibabel returns
         # a scaling matrix where the centre voxel
         # corresponds to world location (0, 0, 0).
@@ -253,7 +253,7 @@ class Nifti(notifier.Notifier):
             voxToWorldMat = transform.scaleOffsetXform(pixdims, 0)
 
         # Otherwise we let nibabel decide
-        # which transform to use. 
+        # which transform to use.
         else:
             voxToWorldMat = np.array(header.get_best_affine())
 
@@ -261,9 +261,9 @@ class Nifti(notifier.Notifier):
 
 
     def __determineShape(self, header):
-        """This method is called by :meth:`__init__`. It figures out the actual 
-        shape of the image data, and the zooms/pixdims for each data axis. Any 
-        empty trailing dimensions are squeezed, but the returned shape is 
+        """This method is called by :meth:`__init__`. It figures out the actual
+        shape of the image data, and the zooms/pixdims for each data axis. Any
+        empty trailing dimensions are squeezed, but the returned shape is
         guaranteed to be at least 3 dimensions. Returns:
 
          - A sequence/tuple containing the image shape, as reported in the
@@ -286,7 +286,7 @@ class Nifti(notifier.Notifier):
             pixdims = header['pixdim'][1:]
 
         pixdims = pixdims[:len(shape)]
-        
+
         return origShape, shape, pixdims
 
 
@@ -314,7 +314,7 @@ class Nifti(notifier.Notifier):
         """Returns a tuple containing the image data shape. """
         return tuple(self.__shape)
 
-    
+
     @property
     def pixdim(self):
         """Returns a tuple containing the image pixdims (voxel sizes)."""
@@ -421,14 +421,14 @@ class Nifti(notifier.Notifier):
         # How convenient - nibabel has a function
         # that does the dirty work for us.
         return fileslice.canonical_slicers(sliceobj, self.__origShape)
- 
-        
+
+
     # TODO: Remove this method, and use the shape attribute directly
     def is4DImage(self):
         """Returns ``True`` if this image is 4D, ``False`` otherwise. """
-        return len(self.__shape) > 3 and self.__shape[3] > 1 
+        return len(self.__shape) > 3 and self.__shape[3] > 1
+
 
-    
     def getXFormCode(self, code=None):
         """This method returns the code contained in the NIFTI header,
         indicating the space to which the (transformed) image is oriented.
@@ -462,7 +462,7 @@ class Nifti(notifier.Notifier):
         # Otherwise, if the qform is
         # present, we return that.
         else:
-            
+
             sform_code = self.header['sform_code']
             qform_code = self.header['qform_code']
 
@@ -472,7 +472,7 @@ class Nifti(notifier.Notifier):
         # Invalid values
         if   code > 4: code = constants.NIFTI_XFORM_UNKNOWN
         elif code < 0: code = constants.NIFTI_XFORM_UNKNOWN
-        
+
         return int(code)
 
     # TODO Check what has worse performance - hashing
@@ -529,7 +529,7 @@ class Nifti(notifier.Notifier):
         shape          = list(self.shape[ :3])
         pixdim         = list(self.pixdim[:3])
         voxToPixdimMat = np.diag(pixdim + [1.0])
-        
+
         if self.isNeurological():
             x              = (shape[0] - 1) * pixdim[0]
             flip           = transform.scaleOffsetXform([-1, 1, 1], [x, 0, 0])
@@ -552,7 +552,7 @@ class Nifti(notifier.Notifier):
 
 
     def getOrientation(self, axis, xform):
-        """Returns a code representing the orientation of the specified 
+        """Returns a code representing the orientation of the specified
         axis in the input coordinate system of the given transformation
         matrix.
 
@@ -567,7 +567,7 @@ class Nifti(notifier.Notifier):
 
         This method returns one of the following values, indicating the
         direction in which coordinates along the specified axis increase:
-        
+
           - :attr:`~.constants.ORIENT_L2R`:     Left to right
           - :attr:`~.constants.ORIENT_R2L`:     Right to left
           - :attr:`~.constants.ORIENT_A2P`:     Anterior to posterior
@@ -586,15 +586,15 @@ class Nifti(notifier.Notifier):
         """
 
         if self.getXFormCode() == constants.NIFTI_XFORM_UNKNOWN:
-            return constants.ORIENT_UNKNOWN 
-        
+            return constants.ORIENT_UNKNOWN
+
         code = nib.orientations.aff2axcodes(
             xform,
             ((constants.ORIENT_R2L, constants.ORIENT_L2R),
              (constants.ORIENT_A2P, constants.ORIENT_P2A),
              (constants.ORIENT_S2I, constants.ORIENT_I2S)))[axis]
 
-        return code 
+        return code
 
 
 class Image(Nifti):
@@ -603,9 +603,9 @@ class Image(Nifti):
     :mod:`nibabel.nifti2.Nifti2Image`, and data access managed by a
     :class:`.ImageWrapper`.
 
-    
+
     In addition to the attributes added by the :meth:`Nifti.__init__` method,
-    the following attributes/properties are present on an ``Image`` instance 
+    the following attributes/properties are present on an ``Image`` instance
     as properties (https://docs.python.org/2/library/functions.html#property):
 
 
@@ -618,36 +618,36 @@ class Image(Nifti):
                    describing its origin.
 
     ``nibImage``   A reference to the ``nibabel`` NIFTI image object.
-    
+
     ``saveState``  A boolean value which is ``True`` if this image is
                    saved to disk, ``False`` if it is in-memory, or has
                    been edited.
-    
+
     ``dataRange``  The minimum/maximum values in the image. Depending upon
                    the value of the ``calcRange`` parameter to
                    :meth:`__init__`, this may be calculated when the ``Image``
-                   is created, or may be incrementally updated as more image 
+                   is created, or may be incrementally updated as more image
                    data is loaded from disk.
     ============== ===========================================================
 
-    
+
     The ``Image`` class adds some :class:`.Notifier` topics to those which are
     already provided by the :class:`Nifti` class - listeners may register to
     be notified of changes to the above properties, by registering on the
     following _topic_ names (see the :class:`.Notifier` class documentation):
 
-    
+
     =============== ======================================================
     ``'data'``      This topic is notified whenever the image data changes
                     (via the :meth:`__setitem__` method). The indices/
                     slices of the portion of data that was modified are
                     passed to registered listeners as the notification
                     value (see :meth:`.Notifier.notify`).
-    
+
     ``'saveState'`` This topic is notified whenever the saved state of the
                     image changes (i.e. data or ``voxToWorldMat`` is
                     edited, or the image saved to disk).
-    
+
     ``'dataRange'`` This topic is notified whenever the image data range
                     is changed/adjusted.
     =============== ======================================================
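
     A minimal, hedged sketch of listening on these topics - the module
     path, the ``register`` signature (listener name, callback, ``topic``
     keyword), and the arguments passed to the callback are assumptions,
     not taken from this class::

         import numpy          as np
         import fsl.data.image as fslimage    # module path assumed

         def onDataChanged(*args):
             # Called when part of the image data is modified; the
             # notification value holds the affected indices/slices.
             print('Image data changed:', args)

         img = fslimage.Image('/path/to/image.nii.gz')   # hypothetical path
         img.register('mylistener', onDataChanged, topic='data')

         # Assigning via __setitem__ triggers a 'data' notification
         # (assuming a 3D image)
         img[:, :, 0] = np.zeros(img.shape[:2])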
@@ -665,7 +665,7 @@ class Image(Nifti):
                  threaded=False):
         """Create an ``Image`` object with the given image data or file name.
 
-        :arg image:     A string containing the name of an image file to load, 
+        :arg image:     A string containing the name of an image file to load,
                         or a :mod:`numpy` array, or a :mod:`nibabel` image
                         object.
 
@@ -677,7 +677,7 @@ class Image(Nifti):
                         image header. Not applied to images loaded from file,
                         or existing :mod:`nibabel` images.
 
-        :arg xform:     A :math:`4\\times 4` affine transformation matrix 
+        :arg xform:     A :math:`4\\times 4` affine transformation matrix
                         which transforms voxel coordinates into real world
                         coordinates. If not provided, and a ``header`` is
                         provided, the transformation in the header is used.
@@ -690,7 +690,7 @@ class Image(Nifti):
                         from disk. In either case, the image data is
                         accessed through an :class:`.ImageWrapper` instance.
                         The data may be loaded into memory later on via the
-                        :meth:`loadData` method. 
+                        :meth:`loadData` method.
 
         :arg calcRange: If ``True`` (the default), the image range is
                         calculated immediately (via a call to
@@ -698,7 +698,7 @@ class Image(Nifti):
                         incrementally updated as more data is read from memory
                         or disk.
 
-        :arg indexed:   If ``True``, and the file is gzipped, it is opened 
+        :arg indexed:   If ``True``, and the file is gzipped, it is opened
                         using the :mod:`indexed_gzip` package. Otherwise the
                         file is opened by ``nibabel``. Ignored if ``loadData``
                         is ``True``.
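
         As an illustrative sketch (the module path and file names are
         assumed), an ``Image`` can be created from a file or a ``numpy``
         array as described above::

             import numpy          as np
             import fsl.data.image as fslimage    # module path assumed

             # From a file on disk - keep the data on disk, and
             # do not calculate the data range up front
             img = fslimage.Image('/path/to/struct.nii.gz',
                                  loadData=False,
                                  calcRange=False)

             # From a numpy array - wrapped in a nibabel image with
             # an identity transform (each voxel maps to 1mm^3)
             data = np.zeros((10, 10, 10), dtype=np.float32)
             img2 = fslimage.Image(data, name='zeros')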
@@ -736,9 +736,9 @@ class Image(Nifti):
             # manage the file reference(s)
             else:
                 nibImage  = nib.load(image)
-                
+
             dataSource = image
- 
+
         # Or a numpy array - we wrap it in a nibabel image,
         # with an identity transformation (each voxel maps
         # to 1mm^3 in real world space)
@@ -750,7 +750,7 @@ class Image(Nifti):
 
             # We default to NIFTI1 and not
             # NIFTI2, because the rest of
-            # FSL is not yet NIFTI2 compatible. 
+            # FSL is not yet NIFTI2 compatible.
             if header is None:
                 ctr = nib.nifti1.Nifti1Image
 
@@ -759,33 +759,33 @@ class Image(Nifti):
             if isinstance(header, nib.nifti2.Nifti2Header):
                 ctr = nib.nifti2.Nifti2Image
             elif isinstance(header, nib.nifti1.Nifti1Header):
-                ctr = nib.nifti1.Nifti1Image 
+                ctr = nib.nifti1.Nifti1Image
             elif isinstance(header, nib.analyze.AnalyzeHeader):
                 ctr = nib.analyze.AnalyzeImage
 
             nibImage = ctr(image, xform, header=header)
-            
+
         # otherwise, we assume that it is a nibabel image
         else:
             nibImage = image
 
-        # Figure out the name of this image, if 
+        # Figure out the name of this image, if
         # it has not been explicitly passed in
         if name is None:
-            
+
             # If this image was loaded
             # from disk, use the file name.
             if isinstance(image, six.string_types):
                 name = removeExt(op.basename(image))
-            
+
             # Or the image was created from a numpy array
             elif isinstance(image, np.ndarray):
                 name = 'Numpy array'
-            
+
             # Or image from a nibabel image
             else:
                 name = 'Nibabel image'
- 
+
         Nifti.__init__(self, nibImage.get_header())
 
         self.name                = name
@@ -808,9 +808,9 @@ class Image(Nifti):
         if calcRange:
             self.calcRange()
 
-        self.__imageWrapper.register(self.__lName, self.__dataRangeChanged) 
+        self.__imageWrapper.register(self.__lName, self.__dataRangeChanged)
+
 
-        
     def __hash__(self):
         """Returns a number which uniquely idenfities this ``Image`` instance
         (the result of ``id(self)``).
@@ -824,7 +824,7 @@ class Image(Nifti):
                                    self.name,
                                    self.dataSource)
 
-        
+
     def __repr__(self):
         """See the :meth:`__str__` method."""
         return self.__str__()
@@ -832,10 +832,10 @@ class Image(Nifti):
 
     def __del__(self):
         """Closes any open file handles, and clears some references. """
-        
+
         self.__nibImage     = None
         self.__imageWrapper = None
-        
+
         if self.__fileobj is not None:
             self.__fileobj.close()
 
@@ -845,8 +845,8 @@ class Image(Nifti):
         access to the image data.
         """
         return self.__imageWrapper
-        
-    
+
+
     @property
     def dataSource(self):
         """Returns the data source (e.g. file name) that this ``Image`` was
@@ -854,14 +854,14 @@ class Image(Nifti):
         """
         return self.__dataSource
 
-    
+
     @property
     def nibImage(self):
         """Returns a reference to the ``nibabel`` NIFTI image instance.
         """
         return self.__nibImage
 
-    
+
     @property
     def saveState(self):
         """Returns ``True`` if this ``Image`` has been saved to disk, ``False``
@@ -869,7 +869,7 @@ class Image(Nifti):
         """
         return self.__saveState
 
-    
+
     @property
     def dataRange(self):
         """Returns the image data range as a  ``(min, max)`` tuple. If the
@@ -893,17 +893,17 @@ class Image(Nifti):
 
         return drange
 
-    
+
     @property
     def dtype(self):
         """Returns the ``numpy`` data type of the image data. """
-        
+
         # Get the data type from the
         # first voxel in the image
         coords = [0] * len(self.__nibImage.shape)
         return self.__nibImage.dataobj[tuple(coords)].dtype
 
-    
+
     @Nifti.voxToWorldMat.setter
     def voxToWorldMat(self, xform):
         """Overrides the :meth:`Nifti.voxToWorldMat` property setter.
@@ -912,12 +912,12 @@ class Image(Nifti):
         updates the header, and this implementation makes sure the
         image is also updated.
         """
-        
+
         Nifti.voxToWorldMat.fset(self, xform)
-        
+
         xform =     self.voxToWorldMat
         code  = int(self.header['sform_code'])
-        
+
         self.__nibImage.set_sform(xform, code)
 
 
@@ -943,7 +943,7 @@ class Image(Nifti):
 
         :arg sizethres: If not ``None``, specifies an image size threshold
                         (total number of bytes). If the number of bytes in
-                        the image is greater than this threshold, the range 
+                        the image is greater than this threshold, the range
                         is calculated on a sample (the first volume for a
                         4D image, or slice for a 3D image).
         """
@@ -961,13 +961,13 @@ class Image(Nifti):
             log.debug('{}: Forcing calculation of full '
                       'data range'.format(self.name))
             self.__imageWrapper[:]
-            
+
         else:
             log.debug('{}: Calculating data range '
                       'from sample'.format(self.name))
 
             # Otherwise if the number of values in the
-            # image is bigger than the size threshold, 
+            # image is bigger than the size threshold,
             # we'll calculate the range from a sample:
             if len(self.shape) == 3: self.__imageWrapper[:, :, 0]
             else:                    self.__imageWrapper[:, :, :, 0]
@@ -1030,7 +1030,7 @@ class Image(Nifti):
             # instance too, as we have just destroyed
             # the nibabel image we gave to the last
             # one.
-            self.__imageWrapper.deregister(self.__lName) 
+            self.__imageWrapper.deregister(self.__lName)
             self.__imageWrapper = imagewrapper.ImageWrapper(
                 self.nibImage,
                 self.name,
@@ -1041,7 +1041,7 @@ class Image(Nifti):
 
         self.__dataSource = filename
         self.__saveState  = True
-        
+
         self.notify(topic='saveState')
 
 
@@ -1062,7 +1062,7 @@ class Image(Nifti):
         :arg sliceobj: Something which can slice the image data.
         :arg values:   New image data.
 
-        .. note:: Modifying image data will force the entire image to be 
+        .. note:: Modifying image data will force the entire image to be
                   loaded into memory if it has not already been loaded.
         """
         values = np.array(values)
@@ -1086,7 +1086,7 @@ class Image(Nifti):
                 self.notify(topic='saveState')
 
             if not np.all(np.isclose(oldRange, newRange)):
-                self.notify(topic='dataRange') 
+                self.notify(topic='dataRange')
 
 
 ALLOWED_EXTENSIONS = ['.nii.gz', '.nii', '.img', '.hdr', '.img.gz', '.hdr.gz']
@@ -1127,7 +1127,7 @@ def looksLikeImage(filename, allowedExts=None):
               ``myfile``).
 
     :arg filename:    The file name to test.
-    
+
     :arg allowedExts: A list of strings containing the allowed file
                       extensions - defaults to :attr:`ALLOWED_EXTENSIONS`.
     """
@@ -1185,7 +1185,7 @@ def defaultExt():
         'NIFTI_PAIR' : '.img',
         'NIFTI_GZ'   : '.nii.gz',
     }
-    
+
     outputType = os.environ.get('FSLOUTPUTTYPE', 'NIFTI_GZ')
 
     return options.get(outputType, '.nii.gz')
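
 A brief, hedged sketch of how ``looksLikeImage`` and ``defaultExt`` behave
 (the module path and file names are illustrative)::

     import os
     import fsl.data.image as fslimage    # module path assumed

     print(fslimage.looksLikeImage('MNI152_T1_2mm.nii.gz'))  # True
     print(fslimage.looksLikeImage('design.fsf'))            # False

     # defaultExt honours $FSLOUTPUTTYPE, falling back to '.nii.gz'
     os.environ['FSLOUTPUTTYPE'] = 'NIFTI'
     print(fslimage.defaultExt())                            # '.nii'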
@@ -1200,7 +1200,7 @@ def loadIndexedImageFile(filename):
 
     import                 threading
     import indexed_gzip as igzip
-    
+
     log.debug('Loading {} using indexed gzip'.format(filename))
 
     # guessed_image_type returns a
@@ -1241,12 +1241,12 @@ def read_segments(fileobj, segments, n_bytes):
     """
 
     from mmap import mmap
-    
+
     try:
         # fileobj is a nibabel.openers.ImageOpener - the
         # actual file is available via the fobj attribute
         lock = getattr(fileobj.fobj, '_arrayproxy_lock')
-        
+
     except:
         return fileslice.orig_read_segments(fileobj, segments, n_bytes)
 
@@ -1263,25 +1263,25 @@ def read_segments(fileobj, segments, n_bytes):
             bytes = fileobj.read(length)
         finally:
             lock.release()
-            
+
         if len(bytes) != n_bytes:
             raise ValueError("Whoops, not enough data in file")
         return bytes
-    
+
     # More than one segment
     bytes = mmap(-1, n_bytes)
     for offset, length in segments:
 
         lock.acquire()
-        try: 
+        try:
             fileobj.seek(offset)
             bytes.write(fileobj.read(length))
         finally:
             lock.release()
-            
+
     if bytes.tell() != n_bytes:
         raise ValueError("Oh dear, n_bytes does not look right")
-    return bytes 
+    return bytes
 
 
 # Monkey-patch the above implementation into nibabel
diff --git a/fsl/data/imagewrapper.py b/fsl/data/imagewrapper.py
index 085939302..810624daa 100644
--- a/fsl/data/imagewrapper.py
+++ b/fsl/data/imagewrapper.py
@@ -28,7 +28,7 @@ get their definitions straight:
 
   - *Expansion*:   A sequence of ``(low, high)`` tuples, specifying an
                    index range into each image dimension, that is used to
-                   *expand* the *coverage* of an image, based on a given set 
+                   *expand* the *coverage* of an image, based on a given set
                    of *slices*.
 
   - *Fancy slice*: Any object which is used to slice an array, and is not
@@ -56,9 +56,9 @@ class ImageWrapper(notifier.Notifier):
     access to ``nibabel`` NIFTI images. The ``ImageWrapper`` class can be
     used to:
 
-    
+
       - Control whether the image is loaded into memory, or kept on disk
-    
+
       - Incrementally update the known image data range, as more image
         data is read in.
 
@@ -80,7 +80,7 @@ class ImageWrapper(notifier.Notifier):
 
     *Image dimensionality*
 
-    
+
     The ``ImageWrapper`` abstracts away trailing image dimensions of length 1.
     This means that if the header for a NIFTI image specifies that the image
     has four dimensions, but the fourth dimension is of length 1, you do not
@@ -92,22 +92,22 @@ class ImageWrapper(notifier.Notifier):
 
     *Data access*
 
-    
+
     The ``ImageWrapper`` can be indexed in one of two ways:
 
        - With basic ``numpy``-like multi-dimensional array slicing (with step
          sizes of 1)
-    
+
        - With boolean array indexing, where the boolean/mask array has the
          same shape as the image data.
 
     See https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html for
     more details on numpy indexing.
- 
+
 
     *Data range*
 
-    
+
     In order to avoid the computational overhead of calculating the image data
     range (its minimum/maximum values) when an image is first loaded in, an
     ``ImageWrapper`` incrementally updates the known image data range as data
@@ -118,7 +118,7 @@ class ImageWrapper(notifier.Notifier):
     is always expanded in a rectilinear manner, i.e. the coverage is always
     rectangular for a 2D image, or cuboid for a 3D image.
 
-    
+
     For a 4D image, the ``ImageWrapper`` internally maintains a separate
     coverage and known data range for each 3D volume within the image. For a 3D
     image, separate coverages and data ranges are stored for each 2D slice.
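
     A hedged sketch of the access patterns described above - the
     ``nibabel`` image here is built from random data purely for
     illustration::

         import numpy   as np
         import nibabel as nib
         import fsl.data.imagewrapper as imagewrapper

         data    = np.random.random((64, 64, 32, 10)).astype(np.float32)
         nibimg  = nib.nifti1.Nifti1Image(data, np.eye(4))
         wrapper = imagewrapper.ImageWrapper(nibimg, name='example',
                                             loadData=False)

         # Basic slicing reads part of the image; the known data
         # range and coverage expand as more data is read in
         vol0 = wrapper[:, :, :, 0]
         print(wrapper.dataRange, wrapper.covered)

         # Boolean mask indexing, with a mask of the same
         # shape as the image data
         mask = data > 0.5
         vals = wrapper[mask]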
@@ -130,7 +130,7 @@ class ImageWrapper(notifier.Notifier):
     property.
 
 
-    The ``ImageWrapper`` class uses the following functions (also defined in 
+    The ``ImageWrapper`` class uses the following functions (also defined in
     this module) to keep track of the portion of the image that has currently
     been included in the data range calculation:
 
@@ -147,7 +147,7 @@ class ImageWrapper(notifier.Notifier):
        adjustCoverage
     """
 
-    
+
     def __init__(self,
                  image,
                  name=None,
@@ -158,7 +158,7 @@ class ImageWrapper(notifier.Notifier):
 
         :arg image:     A ``nibabel.Nifti1Image`` or ``nibabel.Nifti2Image``.
 
-        :arg name:      A name for this ``ImageWrapper``, solely used for 
+        :arg name:      A name for this ``ImageWrapper``, solely used for
                         debug log messages.
 
         :arg loadData:  If ``True``, the image data is loaded into memory.
@@ -242,9 +242,9 @@ class ImageWrapper(notifier.Notifier):
         """Reset the internal state and known data range of this
         ``ImageWrapper``.
 
-        
+
         :arg dataRange: A tuple containing the initial ``(min, max)``  data
-                        range to use. 
+                        range to use.
 
 
         .. note:: The ``dataRange`` parameter is intended for situations where
@@ -254,7 +254,7 @@ class ImageWrapper(notifier.Notifier):
                   any range calculated from the data, unless the calculated
                   data range is wider than the provided ``dataRange``.
         """
-        
+
         if dataRange is None:
             dataRange = None, None
 
@@ -278,7 +278,7 @@ class ImageWrapper(notifier.Notifier):
         # (or 3D volume for 4D images). This effectively
         # means a separate coverage for each index in the
         # last 'real' image dimension (see above).
-        # 
+        #
         # For each slice/volume, the coverage is
         # stored as sequences of (low, high) indices, one
         # for each dimension in the slice/volume (e.g.
@@ -288,7 +288,7 @@ class ImageWrapper(notifier.Notifier):
         # All of these indices are stored in a numpy array:
         #   - first dimension:  low/high index
         #   - second dimension: image dimension
-        #   - third dimension:  slice/volume index 
+        #   - third dimension:  slice/volume index
         self.__coverage = np.zeros((2, ndims, nvols), dtype=np.float32)
 
         # Internally, we calculate and store the
@@ -303,7 +303,7 @@ class ImageWrapper(notifier.Notifier):
         # (i.e. when all data has been loaded in).
         self.__covered = False
 
-        
+
     @property
     def dataRange(self):
         """Returns the currently known data range as a tuple of ``(min, max)``
@@ -321,7 +321,7 @@ class ImageWrapper(notifier.Notifier):
 
         return low, high
 
-    
+
     @property
     def covered(self):
         """Returns ``True`` if this ``ImageWrapper`` has read the entire
@@ -355,7 +355,7 @@ class ImageWrapper(notifier.Notifier):
         """
         return np.array(self.__coverage[..., vol])
 
-    
+
     def loadData(self):
         """Forces all of the image data to be loaded into memory.
 
@@ -413,7 +413,7 @@ class ImageWrapper(notifier.Notifier):
         """Returns ``True`` if all portions of the image have been covered
         in the data range calculation, ``False`` otherwise.
         """
-        
+
         shape  = self.__image.shape
         slices = zip([0] * len(shape), shape)
         return sliceCovered(slices, self.__coverage)
@@ -448,7 +448,7 @@ class ImageWrapper(notifier.Notifier):
         # but not the volume dimension.
         squeezeDims = tuple(range(self.__numRealDims,
                                   self.__numRealDims + self.__numPadDims))
-        
+
         # The calcExpansion function splits up the
         # expansions on volumes - here we calculate
         # the min/max per volume/expansion, and
@@ -472,7 +472,7 @@ class ImageWrapper(notifier.Notifier):
                 if (not np.isnan(oldvhi)) and oldvhi > newvhi: newvhi = oldvhi
 
                 # Update the stored range and
-                # coverage for each volume 
+                # coverage for each volume
                 self.__volRanges[vol, :]  = newvlo, newvhi
                 self.__coverage[..., vol] = adjustCoverage(
                     self.__coverage[..., vol], exp)
@@ -504,8 +504,8 @@ class ImageWrapper(notifier.Notifier):
         of the image.
 
         :arg slices: A tuple of tuples, each tuple being a ``(low, high)``
-                     index pair, one for each dimension in the image. 
-        
+                     index pair, one for each dimension in the image.
+
         :arg data:   The image data at the given ``slices`` (as a ``numpy``
                      array).
         """
@@ -529,10 +529,10 @@ class ImageWrapper(notifier.Notifier):
         Updates the image data coverage, and known data range accordingly.
 
         :arg slices: A tuple of tuples, each tuple being a ``(low, high)``
-                     index pair, one for each dimension in the image. 
-        
+                     index pair, one for each dimension in the image.
+
         :arg data:   The image data at the given ``slices`` (as a ``numpy``
-                     array). 
+                     array).
         """
 
         overlap = sliceOverlap(slices, self.__coverage)
@@ -541,7 +541,7 @@ class ImageWrapper(notifier.Notifier):
         # area and the current coverage, then it's
         # easy - we just expand the coverage to
         # include the newly written area.
-        # 
+        #
         # But if there is overlap between the written
         # area and the current coverage, things are
         # more complicated, because the portion of
@@ -595,23 +595,23 @@ class ImageWrapper(notifier.Notifier):
                 self.__taskThread.enqueue(
                     self.__expandCoverage, slices, taskName=name)
 
-            
+
     def __getitem__(self, sliceobj):
         """Returns the image data for the given ``sliceobj``, and updates
         the known image data range if necessary.
 
         :arg sliceobj: Something which can slice the image data.
         """
-        
+
         log.debug('Getting image data: {}'.format(sliceobj))
-        
+
         shape              = self.__canonicalShape
         realShape          = self.__image.shape
         sliceobj           = canonicalSliceObj(   sliceobj, shape)
         fancy              = isValidFancySliceObj(sliceobj, shape)
         expNdims, expShape = expectedShape(       sliceobj, shape)
 
-        # TODO Cache 3D images for large 4D volumes, 
+        # TODO Cache 3D images for large 4D volumes,
         #      so you don't have to hit the disk?
 
         # Make the slice object compatible with the
@@ -619,7 +619,7 @@ class ImageWrapper(notifier.Notifier):
         sliceobj = canonicalSliceObj(sliceobj, realShape)
         data     = self.__getData(sliceobj)
 
-        # Update data range for the 
+        # Update data range for the
         # data that we just read in
         if not self.__covered:
 
@@ -648,20 +648,20 @@ class ImageWrapper(notifier.Notifier):
             # ndarray with 0 dims, data[0] will raise
             # an error!
             data = data[()]
-                
+
         return data
 
 
     def __setitem__(self, sliceobj, values):
         """Writes the given ``values`` to the image at the given ``sliceobj``.
 
-        
+
         :arg sliceobj: Something which can be used to slice the array.
         :arg values:   Data to write to the image.
 
-        
+
         .. note:: Modifying image data will cause the entire image to be
-                  loaded into memory. 
+                  loaded into memory.
         """
 
         realShape = self.__image.shape
@@ -675,7 +675,7 @@ class ImageWrapper(notifier.Notifier):
         # values to prevent numpy from raising
         # an error in the assignment below.
         if realShape != self.__canonicalShape:
-            
+
             expNdims, expShape = expectedShape(sliceobj, realShape)
 
             # If we are slicing a scalar, the
@@ -685,13 +685,13 @@ class ImageWrapper(notifier.Notifier):
                 if len(values) > 1:
                     raise IndexError('Invalid assignment: [{}] = {}'.format(
                         sliceobj, len(values)))
-        
+
                 values = values[0]
 
-            # Make sure that the values 
+            # Make sure that the values
             # have a compatible shape.
             else:
-                
+
                 values = np.array(values)
                 if values.shape != expShape:
                     values = values.reshape(expShape)
@@ -774,7 +774,7 @@ def canonicalSliceObj(sliceobj, shape):
     ``nibabel.fileslice.canonical_slicers`` function.
     """
 
-    # Fancy slice objects must have 
+    # Fancy slice objects must have
     # the same shape as the data
     if isValidFancySliceObj(sliceobj, shape):
         return sliceobj.reshape(shape)
@@ -783,12 +783,12 @@ def canonicalSliceObj(sliceobj, shape):
 
         if not isinstance(sliceobj, tuple):
             sliceobj = (sliceobj,)
-        
+
         if len(sliceobj) > len(shape):
             sliceobj = sliceobj[:len(shape)]
 
         return nib.fileslice.canonical_slicers(sliceobj, shape)
-    
+
 
 def canonicalShape(shape):
     """Calculates a *canonical* shape, how the given ``shape`` should
@@ -800,12 +800,12 @@ def canonicalShape(shape):
 
     # Squeeze out empty dimensions, as
     # a 3D image can sometimes be listed
-    # as having 4 or more dimensions 
+    # as having 4 or more dimensions
     for i in reversed(range(len(shape))):
         if shape[i] == 1: shape = shape[:i]
         else:             break
 
-    # But make sure the shape 
+    # But make sure the shape
     # has at least 3 dimensions
     if len(shape) < 3:
         shape = shape + [1] * (3 - len(shape))
@@ -827,13 +827,13 @@ def expectedShape(sliceobj, shape):
     :arg shape:    Shape of the array being sliced.
 
     :returns:      A tuple containing:
-    
+
                      - Expected number of dimensions of the result
-    
+
                      - Expected shape of the result (or ``None`` if
                        ``sliceobj`` is fancy).
     """
-    
+
     if isValidFancySliceObj(sliceobj, shape):
         return 1, None
 
@@ -845,19 +845,19 @@ def expectedShape(sliceobj, shape):
 
     # Figure out the number of dimensions
     # that the result should have, given
-    # this slice object. 
+    # this slice object.
     expShape = []
 
     for i in range(len(sliceobj)):
 
-        # Each dimension which has an 
+        # Each dimension which has an
         # int slice will be collapsed
         if isinstance(sliceobj[i], int):
             continue
 
         start = sliceobj[i].start
         stop  = sliceobj[i].stop
-        
+
         if start is None: start = 0
         if stop  is None: stop  = shape[i]
 
@@ -893,7 +893,7 @@ def sliceObjToSliceTuple(sliceobj, shape):
 
     for dim, s in enumerate(sliceobj):
 
-        # each element in the slices tuple should 
+        # each element in the slices tuple should
         # be a slice object or an integer
         if isinstance(s, slice): i = [s.start, s.stop]
         else:                    i = [s,       s + 1]
@@ -921,14 +921,14 @@ def sliceTupleToSliceObj(slices):
     return tuple(sliceobj)
 
 
-def adjustCoverage(oldCoverage, slices): 
+def adjustCoverage(oldCoverage, slices):
     """Adjusts/expands the given ``oldCoverage`` so that it covers the
     given set of ``slices``.
 
     :arg oldCoverage: A ``numpy`` array of shape ``(2, n)`` containing
                       the (low, high) index pairs for ``n`` dimensions of
                       a single slice/volume in the image.
-    
+
     :arg slices:      A sequence of (low, high) index pairs. If ``slices``
                       contains more dimensions than are specified in
                       ``oldCoverage``, the trailing dimensions are ignored.
@@ -982,7 +982,7 @@ def sliceOverlap(slices, coverage):
                     the current image coverage.
 
     :returns: One of the following codes:
-    
+
               .. autosummary::
 
               OVERLAP_ALL
@@ -1003,7 +1003,7 @@ def sliceOverlap(slices, coverage):
         for dim in range(numDims):
 
             lowCover, highCover = coverage[:, dim, vol]
-            lowSlice, highSlice = slices[     dim] 
+            lowSlice, highSlice = slices[     dim]
 
             # No coverage
             if np.isnan(lowCover) or np.isnan(highCover):
@@ -1028,7 +1028,7 @@ def sliceOverlap(slices, coverage):
             # slice and coverage on this dimension
             # - check the other dimensions.
             state = OVERLAP_SOME
-            
+
         overlapStates[i] = state
 
     if   np.any(overlapStates == OVERLAP_SOME): return OVERLAP_SOME
@@ -1057,7 +1057,7 @@ def sliceCovered(slices, coverage):
         for dim in range(numDims):
 
             lowCover, highCover = coverage[:, dim, vol]
-            lowSlice, highSlice = slices[     dim] 
+            lowSlice, highSlice = slices[     dim]
 
             if np.isnan(lowCover) or np.isnan(highCover):
                 return False
@@ -1081,7 +1081,7 @@ def calcExpansion(slices, coverage):
 
     numDims         = coverage.shape[1]
     padDims         = len(slices) - numDims - 1
-    lowVol, highVol = slices[numDims] 
+    lowVol, highVol = slices[numDims]
 
     expansions = []
     volumes    = []
@@ -1095,10 +1095,10 @@ def calcExpansion(slices, coverage):
         for i in range(padDims):
             exp.append((0, 1))
         return exp
-    
+
     for vol in range(lowVol, highVol):
 
-        # No coverage of this volume - 
+        # No coverage of this volume -
         # we need the whole slice.
         if np.any(np.isnan(coverage[:, :, vol])):
             exp = [(s[0], s[1]) for s in slices[:numDims]]
@@ -1125,7 +1125,7 @@ def calcExpansion(slices, coverage):
             # below the current coverage
             if lowCover - lowSlice > 0:
                 reqRanges.append((dim, int(lowSlice), int(lowCover)))
-                
+
             # The slice covers a region
             # above the current coverage
             if highCover - highSlice < 0:
@@ -1143,7 +1143,7 @@ def calcExpansion(slices, coverage):
             # for that dimension...
             expansion[dimx][0] = xlo
             expansion[dimx][1] = xhi
-                
+
             # And will span the union of
             # the coverage, and calculated
             # range for every other dimension.
@@ -1239,7 +1239,7 @@ def calcExpansion(slices, coverage):
 
 
 def collapseExpansions(expansions, numDims):
-    """Scans through the given list of expansions (each assumed to pertain 
+    """Scans through the given list of expansions (each assumed to pertain
     to a single 3D image), and combines any which cover the same
     image area, and cover adjacent volumes.
 
@@ -1268,7 +1268,7 @@ def collapseExpansions(expansions, numDims):
         vol        = exp[numDims][0]
         exp        = tuple(exp[:numDims])
         commonExps = commonExpansions.get(exp, None)
-        
+
         if commonExps is None:
             commonExps            = []
             commonExpansions[exp] = commonExps
@@ -1277,17 +1277,17 @@ def collapseExpansions(expansions, numDims):
 
             if vol >= vlo and vol < vhi:
                 break
-            
+
             elif vol == vlo - 1:
                 commonExps[i] = vol, vhi
                 break
             elif vol == vhi:
                 commonExps[i] = vlo, vol + 1
                 break
-            
+
         else:
             commonExps.append((vol, vol + 1))
-            
+
     collapsed = []
 
     for exp, volRanges in commonExpansions.items():
diff --git a/fsl/data/melodicanalysis.py b/fsl/data/melodicanalysis.py
index 67928daa3..d0074f27f 100644
--- a/fsl/data/melodicanalysis.py
+++ b/fsl/data/melodicanalysis.py
@@ -2,7 +2,7 @@
 #
 # melodicanalysis.py - Utility functions for loading/querying the contents of
 # a MELODIC analysis directory.
-# 
+#
 # Author: Paul McCarthy <pauldmccarthy@gmail.com>
 #
 """This module provides a set of functions for accessing the contents of a
@@ -43,7 +43,7 @@ log = logging.getLogger(__name__)
 
 def isMelodicImage(path):
     """Returns ``True`` if the given path looks like it is a melodic
-    component image file, ``False`` otherwise. 
+    component image file, ``False`` otherwise.
     """
 
 
@@ -69,7 +69,7 @@ def isMelodicDir(path):
     """
 
     path = op.abspath(path)
-    
+
     if op.isdir(path): dirname = path
     else:              dirname = op.dirname(path)
 
@@ -88,7 +88,7 @@ def isMelodicDir(path):
     # melodic_mix and melodic_FTmix
     if not op.exists(op.join(dirname, 'melodic_mix')):   return False
     if not op.exists(op.join(dirname, 'melodic_FTmix')): return False
- 
+
     return True
 
 
@@ -101,7 +101,7 @@ def getAnalysisDir(path):
 
     if meldir is not None and isMelodicDir(meldir):
         return meldir
-    
+
     return None
 
 
@@ -111,10 +111,10 @@ def getTopLevelAnalysisDir(path):
     file system) directory is returned. Otherwise, ``None`` is returned.
 
     See :func:`.featanalysis.getTopLevelAnalysisDir`.
-    """ 
+    """
     return featanalysis.getTopLevelAnalysisDir(path)
 
-    
+
 def getDataFile(meldir):
     """If the given melodic directory is contained within another analysis
     directory, the path to the data file is returned. Otherwise ``None`` is
@@ -144,7 +144,7 @@ def getICFile(meldir):
 
 def getMixFile(meldir):
     """Returns the path to the melodic mix file. """
-    
+
     mixfile = op.join(meldir, 'melodic_mix')
     if op.exists(mixfile): return mixfile
     else:                  return None
@@ -152,7 +152,7 @@ def getMixFile(meldir):
 
 def getFTMixFile(meldir):
     """Returns the path to the melodic FT mix file. """
-    
+
     ftmixfile = op.join(meldir, 'melodic_FTmix')
     if op.exists(ftmixfile): return ftmixfile
     else:                    return None
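
 For example (the analysis path is hypothetical)::

     import fsl.data.melodicanalysis as melanalysis

     meldir = '/data/subj01.feat/filtered_func_data.ica'   # hypothetical path

     if melanalysis.isMelodicDir(meldir):
         icfile = melanalysis.getICFile(meldir)
         mix    = melanalysis.getMixFile(meldir)
         ftmix  = melanalysis.getFTMixFile(meldir)
         print(icfile, mix, ftmix)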
@@ -162,10 +162,10 @@ def getReportFile(meldir):
     """Returns the path to the MELODIC report index file, or ``None`` if there
     is no report.
     """
-    
+
     report = op.join(meldir, '..', 'report.html')
     if op.exists(report): return report
-    else:                 return None 
+    else:                 return None
 
 
 def getNumComponents(meldir):
diff --git a/fsl/data/melodicimage.py b/fsl/data/melodicimage.py
index 61931a3e4..80a4125d0 100644
--- a/fsl/data/melodicimage.py
+++ b/fsl/data/melodicimage.py
@@ -24,7 +24,7 @@ class MelodicImage(fslimage.Image):
     The ``MelodicImage`` class provides a few MELODIC-specific attributes and
     methods:
 
-    
+
     .. autosummary::
        :nosignatures:
 
@@ -59,8 +59,8 @@ class MelodicImage(fslimage.Image):
 
         if not melanalysis.isMelodicImage(path):
             raise ValueError('{} does not appear to be a '
-                             'MELODIC component file'.format(path)) 
-            
+                             'MELODIC component file'.format(path))
+
         fslimage.Image.__init__(self, path, *args, **kwargs)
 
         meldir            = op.dirname(path)
@@ -73,7 +73,7 @@ class MelodicImage(fslimage.Image):
         # TR value if possible
         dataFile = self.getDataFile()
 
-        if dataFile is not None: 
+        if dataFile is not None:
             dataImage = fslimage.Image(dataFile,
                                        loadData=False,
                                        calcRange=False)
@@ -88,7 +88,7 @@ class MelodicImage(fslimage.Image):
         initialised from the data file (see the :meth:`getDataFile` method).
         """
         return self.__tr
-    
+
 
     @tr.setter
     def tr(self, val):
@@ -101,24 +101,24 @@ class MelodicImage(fslimage.Image):
         if oldval != val:
             self.notify(topic='tr')
 
-        
+
     def getComponentTimeSeries(self, component):
         """Returns the time course for the specified (0-indexed) component. """
         return self.__melmix[:, component]
 
-    
+
     def getComponentPowerSpectrum(self, component):
         """Returns the power spectrum for the time course of the specified
         (0-indexed) component.
         """
-        return self.__melFTmix[:, component] 
+        return self.__melFTmix[:, component]
 
 
     def numComponents(self):
         """Returns the number of components in this ``MelodicImage``. """
         return self.shape[3]
 
-    
+
     def getMelodicDir(self):
         """Returns the melodic output directory in which this image is
         contained.
@@ -131,7 +131,7 @@ class MelodicImage(fslimage.Image):
         :func:`.melodicanalysis.getReportFile`.
         """
         return melanalysis.getReportFile(self.__meldir)
-    
+
 
     def getTopLevelAnalysisDir(self):
         """Returns the top level analysis, if the melodic analysis for this
@@ -155,4 +155,4 @@ class MelodicImage(fslimage.Image):
         ``MelodicImage``. See the :func:`.melodicanalysis.getMeanFile`
         function.
         """
-        return melanalysis.getMeanFile(self.__meldir) 
+        return melanalysis.getMeanFile(self.__meldir)
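
 A hedged usage sketch for the class above (the component file path is
 hypothetical)::

     import fsl.data.melodicimage as melimage

     # Path to a melodic_IC component image (hypothetical)
     img = melimage.MelodicImage('/data/subj01.ica/melodic_IC.nii.gz')

     print(img.numComponents())
     print(img.tr)

     # Time course and power spectrum of the first component
     ts   = img.getComponentTimeSeries(0)
     spec = img.getComponentPowerSpectrum(0)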
diff --git a/fsl/data/mesh.py b/fsl/data/mesh.py
index ce7094e58..e34ebf2a7 100644
--- a/fsl/data/mesh.py
+++ b/fsl/data/mesh.py
@@ -41,16 +41,16 @@ class TriangleMesh(object):
 
     A ``TriangleMesh`` instance has the following attributes:
 
-    
+
     ============== ====================================================
     ``name``       A name, typically the file name sans-suffix.
-    
+
     ``dataSource`` Full path to the mesh file (or ``None`` if there is
                    no file associated with this mesh).
-    
+
     ``vertices``   A :math:`N\times 3` ``numpy`` array containing
                    the vertices.
-    
+
     ``indices``    A :math:`M\times 3` ``numpy`` array containing
                    the vertex indices for :math:`M` triangles
     ============== ====================================================
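
     A minimal sketch using a small in-memory mesh (toy data)::

         import numpy         as np
         import fsl.data.mesh as fslmesh

         # A single triangle in the z=0 plane
         verts = np.array([[0, 0, 0],
                           [1, 0, 0],
                           [0, 1, 0]], dtype=np.float32)

         mesh = fslmesh.TriangleMesh(verts)

         print(mesh.vertices.shape)   # (3, 3)
         print(mesh.indices)          # one triangle: [[0 1 2]]
         print(mesh.getBounds())      # (lows, highs) of the bounding box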
@@ -67,7 +67,7 @@ class TriangleMesh(object):
        clearVertexData
     """
 
-    
+
     def __init__(self, data, indices=None):
         """Create a ``TriangleMesh`` instance.
 
@@ -93,13 +93,13 @@ class TriangleMesh(object):
         else:
             self.name       = 'TriangleMesh'
             self.dataSource = None
-            
+
         if indices is None:
             indices = np.arange(data.shape[0])
 
         self.vertices     = np.array(data)
         self.indices      = np.array(indices).reshape((-1, 3))
-        
+
         self.__vertexData = {}
         self.__loBounds   = self.vertices.min(axis=0)
         self.__hiBounds   = self.vertices.max(axis=0)
@@ -120,7 +120,7 @@ class TriangleMesh(object):
 
     def getBounds(self):
         """Returns a tuple of values which define a minimal bounding box that
-        will contain all vertices in this ``TriangleMesh`` instance. The 
+        will contain all vertices in this ``TriangleMesh`` instance. The
         bounding box is arranged like so:
 
             ``((xlow, ylow, zlow), (xhigh, yhigh, zhigh))``
@@ -169,7 +169,7 @@ class TriangleMesh(object):
         try:             return self.__vertexData[dataSource]
         except KeyError: return self.loadVertexData(dataSource)
 
-    
+
     def clearVertexData(self):
         """Clears the internal vertex data cache - see the
         :meth:`loadVertexData` and :meth:`getVertexData`  methods.
@@ -193,14 +193,14 @@ def loadVTKPolydataFile(infile):
     :arg infile: Name of a file to load from.
 
     :returns: a tuple containing three values:
-    
+
                 - A :math:`N\\times 3` ``numpy`` array containing :math:`N`
                   vertices.
                 - A 1D ``numpy`` array containing the lengths of each polygon.
                 - A 1D ``numpy`` array containing the vertex indices for all
                   polygons.
     """
-    
+
     lines = None
 
     with open(infile, 'rt') as f:
@@ -210,11 +210,11 @@ def loadVTKPolydataFile(infile):
 
     if lines[3] != 'DATASET POLYDATA':
         raise ValueError('Only the POLYDATA data type is supported')
-    
+
     nVertices = int(lines[4].split()[1])
     nPolygons = int(lines[5 + nVertices].split()[1])
-    nIndices  = int(lines[5 + nVertices].split()[2]) - nPolygons 
-    
+    nIndices  = int(lines[5 + nVertices].split()[2]) - nPolygons
+
     vertices       = np.zeros((nVertices, 3), dtype=np.float32)
     polygonLengths = np.zeros( nPolygons,     dtype=np.uint32)
     indices        = np.zeros( nIndices,      dtype=np.uint32)
diff --git a/fsl/data/vest.py b/fsl/data/vest.py
index dda920564..2c6056880 100644
--- a/fsl/data/vest.py
+++ b/fsl/data/vest.py
@@ -64,6 +64,6 @@ def loadVestLutFile(path, normalise=True):
         cmin = colours.min()
         cmax = colours.max()
         return (colours - cmin) / (cmax - cmin)
-    
+
     else:
         return colours
diff --git a/fsl/data/volumelabels.py b/fsl/data/volumelabels.py
index 14648702b..ba03cf117 100644
--- a/fsl/data/volumelabels.py
+++ b/fsl/data/volumelabels.py
@@ -53,7 +53,7 @@ class VolumeLabels(notifier.Notifier):
        clearComponents
 
     The ``VolumeLabels`` class uses the :class:`.Notifier` interface
-    to notify listeners about changes to the labels. Listeners can be 
+    to notify listeners about changes to the labels. Listeners can be
     registered to be notified on the following topics:
 
       - ``added``:   A new label was added to a component.
@@ -68,7 +68,7 @@ class VolumeLabels(notifier.Notifier):
               accessible via the :meth:`getDisplayLabel` method.
     """
 
-    
+
     def __init__(self, nvolumes):
         """Create a ``VolumeLabels`` instance.
 
@@ -86,7 +86,7 @@ class VolumeLabels(notifier.Notifier):
         # __components is a dictionary of
         #
         #   { label : [component] } mappings
-        # 
+        #
         # containing the same information, but
         # making lookup by label a bit quicker.
         #
@@ -113,7 +113,7 @@ class VolumeLabels(notifier.Notifier):
 
         self.__components = {}
         self.__labels     = [[] for i in range(self.__ncomps)]
-        
+
 
     def load(self, filename):
         """Loads component labels from the specified file. See the
@@ -151,7 +151,7 @@ class VolumeLabels(notifier.Notifier):
                 for label in labels:
                     self.addLabel(i, label)
 
-    
+
     def save(self, filename, dirname=None):
         """Saves the component classifications stored by this ``VolumeLabels``
         instance to the specified file. See the
@@ -185,7 +185,7 @@ class VolumeLabels(notifier.Notifier):
         """
         label = label.lower()
         return label in self.__labels[component]
-    
+
 
     def addLabel(self, component, label, notify=True):
         """Adds the given label to the given component.
@@ -203,7 +203,7 @@ class VolumeLabels(notifier.Notifier):
         label   = label.lower()
         labels  = list(self.__labels[component])
         comps   = list(self.__components.get(label, []))
-        
+
         if label in labels:
             return False
 
@@ -211,7 +211,7 @@ class VolumeLabels(notifier.Notifier):
         comps .append(component)
 
         self.__displayLabels[label]     = display
-        self.__components[   label]     = comps        
+        self.__components[   label]     = comps
         self.__labels[       component] = labels
 
         log.debug('Label added to component: {} <-> {}'.format(component,
@@ -221,12 +221,12 @@ class VolumeLabels(notifier.Notifier):
             self.notify(topic='added', value=[(component, label)])
 
         return True
- 
+
 
     def removeLabel(self, component, label, notify=True):
         """Removes the given label from the given component.
 
-        :returns: ``True`` if the label was removed, ``False`` if the 
+        :returns: ``True`` if the label was removed, ``False`` if the
                   component did not have this label.
         """
 
@@ -258,7 +258,7 @@ class VolumeLabels(notifier.Notifier):
 
     def clearLabels(self, component):
         """Removes all labels from the given component. """
-        
+
         labels  = self.getLabels(component)
         removed = []
 
@@ -271,20 +271,20 @@ class VolumeLabels(notifier.Notifier):
         if len(removed) > 0:
             self.notify(topic='removed', value=removed)
 
-    
+
     def getComponents(self, label):
         """Returns a list of all components which have the given label. """
         label = label.lower()
         return list(self.__components.get(label, []))
 
-    
+
     def hasComponent(self, label, component):
         """Returns ``True`` if the given compoennt has the given label,
         ``False`` otherwise.
         """
         return component in self.getComponents(label)
 
-    
+
     def addComponent(self, label, component):
         """Adds the given label to the given component. """
         return self.addLabel(component, label)
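
 To illustrate the ``VolumeLabels`` API shown above (component indices and
 label names are arbitrary examples)::

     import fsl.data.volumelabels as vollabels

     # Labels for a decomposition with 5 components
     labels = vollabels.VolumeLabels(5)

     labels.addLabel(0, 'Signal')
     labels.addLabel(1, 'Movement')

     print(labels.getLabels(1))                 # stored lower-case
     print(labels.getComponents('movement'))    # -> [1]
     print(labels.hasComponent('movement', 1))  # -> True

     labels.removeLabel(1, 'Movement')
     labels.clearLabels(0)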
@@ -294,10 +294,10 @@ class VolumeLabels(notifier.Notifier):
         """Removes the given label from the given component. """
         return self.removeLabel(component, label, notify)
 
-    
+
     def clearComponents(self, label):
         """Removes the given label from all components. """
-        
+
         components = self.getComponents(label)
         removed    = []
 
diff --git a/fsl/scripts/imcp.py b/fsl/scripts/imcp.py
index 609504fe7..9b7129755 100755
--- a/fsl/scripts/imcp.py
+++ b/fsl/scripts/imcp.py
@@ -5,7 +5,7 @@
 # Author: Paul McCarthy <paulmc@fmrib.ox.ac.uk>
 #
 """This module defines the ``imcp`` application, for copying NIFTI image
-files. 
+files.
 
 The :func:`main` function is essentially a wrapper around the
 :func:`fsl.utils.imcp.imcp` function - see its documentation for more details.
@@ -56,7 +56,7 @@ def main(argv=None):
                                     fileGroups=fslimage.FILE_GROUPS)
 
     for src in srcs:
-        imcp.imcp(src, dest, useDefaultExt=True, overwrite=True) 
+        imcp.imcp(src, dest, useDefaultExt=True, overwrite=True)
 
 
 if __name__ == '__main__':
@@ -65,4 +65,4 @@ if __name__ == '__main__':
         main()
     except Exception as e:
         print(e)
-        sys.exit(1) 
+        sys.exit(1)
diff --git a/fsl/scripts/immv.py b/fsl/scripts/immv.py
index 1d2219fbd..90a52f963 100755
--- a/fsl/scripts/immv.py
+++ b/fsl/scripts/immv.py
@@ -54,7 +54,7 @@ def main(argv=None):
 
     srcs = fslpath.removeDuplicates(srcs,
                                     allowedExts=fslimage.ALLOWED_EXTENSIONS,
-                                    fileGroups=fslimage.FILE_GROUPS) 
+                                    fileGroups=fslimage.FILE_GROUPS)
 
     for src in srcs:
         imcp.immv(src, dest, useDefaultExt=True, overwrite=True)
diff --git a/fsl/utils/async.py b/fsl/utils/async.py
index 61e5aca90..08142c82f 100644
--- a/fsl/utils/async.py
+++ b/fsl/utils/async.py
@@ -132,30 +132,30 @@ def run(task, onFinish=None, onError=None, name=None):
 
     # Calls the onFinish or onError handler
     def callback(cb, *args, **kwargs):
-        
+
         if cb is None:
             return
-        
+
         if haveWX: idle(cb, *args, **kwargs)
         else:      cb(      *args, **kwargs)
 
-    # Runs the task, and calls 
+    # Runs the task, and calls
     # callback functions as needed.
     def wrapper():
 
         try:
             task()
             log.debug('Task "{}" finished'.format(name))
-            callback(onFinish) 
-            
+            callback(onFinish)
+
         except Exception as e:
-            
+
             log.warn('Task "{}" crashed'.format(name), exc_info=True)
             callback(onError, e)
 
     # If WX, run on a thread
     if haveWX:
-        
+
         log.debug('Running task "{}" on thread'.format(name))
 
         thread = threading.Thread(target=wrapper)
@@ -238,7 +238,7 @@ def setIdleTimeout(timeout=None):
     """
 
     global _idleCallRate
-    
+
     if timeout is None:
         timeout = 200
 
@@ -292,7 +292,7 @@ def _wxIdleLoop(ev):
 
     try:
         task = _idleQueue.get_nowait()
-        
+
     except queue.Empty:
 
         # Make sure that we get called periodically,
@@ -323,7 +323,7 @@ def _wxIdleLoop(ev):
 
     # Has the task timed out?
     elif task.timeout == 0 or (elapsed < task.timeout):
-        
+
         log.debug('Running function ({}) on wx idle loop'.format(taskName))
 
         try:
@@ -344,7 +344,7 @@ def _wxIdleLoop(ev):
 
 def inIdle(taskName):
     """Returns ``True`` if a task with the given name is queued on the
-    idle loop (or is currently running), ``False`` otherwise. 
+    idle loop (or is currently running), ``False`` otherwise.
     """
     global _idleQueueDict
     return taskName in _idleQueueDict
@@ -356,7 +356,7 @@ def cancelIdle(taskName):
 
     A ``KeyError`` is raised if no task called ``taskName`` exists.
     """
-    
+
     global _idleQueueDict
     _idleQueueDict[taskName].timeout = -1
 
@@ -404,7 +404,7 @@ def idle(task, *args, **kwargs):
 
     All other arguments are passed through to the task function.
 
-    
+
     If a ``wx.App`` is not running, the ``timeout``, ``name`` and
     ``skipIfQueued`` arguments are ignored. Instead, the call will sleep for
     ``after`` seconds, and then the ``task`` is called directly.
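
     A hedged sketch of scheduling work with this module, using only the
     keyword arguments described above::

         import importlib

         # 'async' is a reserved word in newer Python versions, so
         # the module is imported via importlib here
         async_mod = importlib.import_module('fsl.utils.async')

         def longComputation():
             pass   # stand-in for something expensive

         def onFinish():
             print('Computation finished')

         # With wx running, the task is executed on a separate thread,
         # and onFinish is called when it completes
         async_mod.run(longComputation, onFinish=onFinish, name='compute')

         # Schedule a call on the wx idle loop; without a wx.App the
         # function is simply called directly
         async_mod.idle(print, 'hello', name='sayhello', skipIfQueued=True)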
@@ -447,7 +447,7 @@ def idle(task, *args, **kwargs):
         if havewx and (not _idleRegistered):
             app = wx.GetApp()
             app.Bind(wx.EVT_IDLE, _wxIdleLoop)
-            
+
             _idleTimer      = wx.Timer(app)
             _idleRegistered = True
 
@@ -466,7 +466,7 @@ def idle(task, *args, **kwargs):
                 cancelIdle(name)
                 log.debug('Idle task ({}) is already queued - '
                           'dropping the old task'.format(name))
-                
+
             elif skipIfQueued:
                 log.debug('Idle task ({}) is already queued '
                           '- skipping it'.format(name))
@@ -487,10 +487,10 @@ def idle(task, *args, **kwargs):
 
         if name is not None:
             _idleQueueDict[name] = idleTask
-            
+
     else:
         time.sleep(after)
-        log.debug('Running idle task directly') 
+        log.debug('Running idle task directly')
         task(*args, **kwargs)
 
 
@@ -499,11 +499,11 @@ def idleWhen(func, condition, *args, **kwargs):
     :func:`idle` when it returns ``True``.
 
     :arg func:      Function to call.
-    
+
     :arg condition: Function which returns ``True`` or ``False``. The ``func``
-                    function is only called when the ``condition`` function 
+                    function is only called when the ``condition`` function
                     returns ``True``.
-    
+
     :arg pollTime:  Must be passed as a keyword argument. Time (in seconds) to
                     wait between successive calls to ``when``. Defaults to
                     ``0.2``.
@@ -538,10 +538,10 @@ def wait(threads, task, *args, **kwargs):
                        function will create a new thread to ``join`` the
                        ``threads``, and will return immediately.
 
-    
+
     All other arguments are passed to the ``task`` function.
 
-    
+
     .. note:: This function will not support ``task`` functions which expect
               a keyword argument called ``wait_direct``.
     """
@@ -550,7 +550,7 @@ def wait(threads, task, *args, **kwargs):
 
     if not isinstance(threads, collections.Sequence):
         threads = [threads]
-    
+
     haveWX = _haveWX()
 
     def joinAll():
@@ -566,7 +566,7 @@ def wait(threads, task, *args, **kwargs):
         thread = threading.Thread(target=joinAll)
         thread.start()
         return thread
-    
+
     else:
         joinAll()
         return None
@@ -632,7 +632,7 @@ class TaskThread(threading.Thread):
         All other arguments are passed through to the task function when it is
         executed.
 
-        .. note:: If the specified ``taskName`` is not unique (i.e. another 
+        .. note:: If the specified ``taskName`` is not unique (i.e. another
                   task with the same name may already be enqueued), the
                   :meth:`isQueued` method will probably return invalid
                   results.
@@ -760,7 +760,7 @@ class TaskThread(threading.Thread):
                 log.debug('Task completed (vetoed onFinish): {} [{}]'.format(
                     task.name,
                     getattr(task.func, '__name__', '<unknown>')))
-                
+
             except Exception as e:
                 log.warning('Task crashed: {} [{}]: {}: {}'.format(
                     task.name,
@@ -798,7 +798,7 @@ def mutex(*args, **kwargs):
             def dangerousMethod2(self):
                 return self.__sharedData.pop()
 
-    
+
 
     The ``@mutex`` decorator will ensure that, at any point in time, only
     one thread is running either of the ``dangerousMethod1`` or
@@ -840,7 +840,7 @@ class MutexFactory(object):
         If this ``MutexFactory`` is accessed through a class, the
         decorated function is returned.
         """
-        
+
         # Class-level access
         if instance is None:
             return self.__func
@@ -863,7 +863,7 @@ class MutexFactory(object):
                 lock.release()
 
         # Replace this MutexFactory with
-        # the decorator on the instance 
+        # the decorator on the instance
         decorator = functools.update_wrapper(decorator, self.__func)
         setattr(instance, self.__func.__name__, decorator)
         return decorator
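
For reference, a minimal usage sketch of the ``idle`` and ``wait`` helpers
patched above, based only on the signatures visible in these hunks. The
module path ``fsl.utils.async`` is taken from the diff header; ``async``
later became a reserved word (Python 3.7+), so the import goes through
``importlib`` here::

    import importlib
    import threading

    fslasync = importlib.import_module('fsl.utils.async')

    def work(msg):
        print(msg)

    # With no wx main loop running, idle() sleeps for 'after' seconds
    # and then calls the task directly (see the hunk above).
    fslasync.idle(work, 'hello', after=0.1, name='work-task')

    # wait() joins the given thread(s) and then calls the task; with
    # no wx main loop it blocks and returns None.
    t = threading.Thread(target=work, args=('in a thread',))
    t.start()
    fslasync.wait(t, work, 'all threads finished')
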
diff --git a/fsl/utils/cache.py b/fsl/utils/cache.py
index c49a6ff16..ec42dd876 100644
--- a/fsl/utils/cache.py
+++ b/fsl/utils/cache.py
@@ -27,7 +27,7 @@ class CacheItem(object):
         self.value     = value
         self.expiry    = expiry
         self.storetime = time.time()
-        
+
 
 class Cache(object):
     """The ``Cache`` is a simple in-memory cache built on a
@@ -36,7 +36,7 @@ class Cache(object):
 
        - When an item is added to a full cache, the oldest entry is
          automatically dropped.
-    
+
        - Expiration times can be specified for individual items. If a request
          is made to access an expired item, an :class:`Expired` exception is
          raised.
@@ -56,9 +56,9 @@ class Cache(object):
         """Put an item in the cache.
 
         :arg key:    Item identifier (must be hashable).
-        
+
         :arg value:  The item to store.
-        
+
         :arg expiry: Expiry time in seconds. An item with an expiry time of
                      ``0`` will not expire.
         """
@@ -83,7 +83,7 @@ class Cache(object):
         # Default value specified - return
         # it if the key is not in the cache
         if defaultSpecified:
-            
+
             entry = self.__cache.get(key, None)
 
             if entry is None:
@@ -93,12 +93,12 @@ class Cache(object):
         # allow KeyErrors to propagate
         else:
             entry = self.__cache[key]
-            
+
         if entry.expiry > 0:
             if time.time() - entry.storetime > entry.expiry:
 
                 self.__cache.pop(key)
- 
+
                 if defaultSpecified: return default
                 else:                raise Expired(key)
 
@@ -120,10 +120,10 @@ class Cache(object):
         which may be specified as either a positional or keyword argument.
 
         :returns: A tuple containing two values:
-        
+
                     - ``True`` if a default argument was specified, ``False``
                       otherwise.
-        
+
                     - The specified default value, or ``None`` if it wasn't
                       specified.
         """
@@ -135,7 +135,7 @@ class Cache(object):
         if   nargs == 0: return False, None
         elif nargs != 1: raise ValueError()
 
-        # The default value is either specified as a 
+        # The default value is either specified as a
         # positional argument, or as a keyword argument
         if   len(args)   == 1: return True, args[0]
         elif len(kwargs) == 1: return True, kwargs['default']
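
A short sketch of the ``Cache`` API documented above. The ``put``/``get``
signatures are taken from these hunks; a no-argument constructor and the
``Expired`` exception being importable from the same module are assumptions::

    import time

    from fsl.utils.cache import Cache, Expired

    cache = Cache()

    # An expiry of 0 (the default described above) means the item never
    # expires; a positive value is a lifetime in seconds.
    cache.put('subject', 'S001')
    cache.put('temp',    42, expiry=0.01)

    print(cache.get('subject'))              # -> 'S001'
    print(cache.get('missing', default=-1))  # default suppresses the KeyError

    time.sleep(0.02)
    try:
        cache.get('temp')
    except Expired:
        print('temp has expired')
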
diff --git a/fsl/utils/imcp.py b/fsl/utils/imcp.py
index b6e35eaa8..398a18774 100644
--- a/fsl/utils/imcp.py
+++ b/fsl/utils/imcp.py
@@ -33,22 +33,22 @@ def imcp(src,
 
     :arg src:           Path to copy. If ``allowedExts`` is provided,
                         the file extension can be omitted.
-    
+
     :arg dest:          Destination path. Can be an incomplete file
-                        specification (i.e. without the extension), or a 
-                        directory. 
+                        specification (i.e. without the extension), or a
+                        directory.
 
-    :arg overwrite:     If ``True`` this function will overwrite files that 
+    :arg overwrite:     If ``True`` this function will overwrite files that
                         already exist. Defaults to ``False``.
 
     :arg useDefaultExt: Defaults to ``False``. If ``True``, the destination
-                        file type will be set according to the default 
+                        file type will be set according to the default
                         extension, specified by
                         :func:`~fsl.data.image.defaultExt`. If the source
                         file does not have the same type as the default
                         extension, it will be converted. If ``False``, the
                         source file type is not changed.
-    
+
     :arg move:          If ``True``, the files are moved, instead of being
                         copied. See :func:`immv`.
     """
@@ -64,7 +64,7 @@ def imcp(src,
     # src was specified without an
     # extension, or the specified
     # src does not have an allowed
-    # extension. 
+    # extension.
     if srcExt == '':
 
         # Try to resolve the specified src
@@ -73,8 +73,8 @@ def imcp(src,
         # addExt will raise an error
         src = fslimage.addExt(src, mustExist=True)
 
-        # We've resolved src to a 
-        # full filename - split it 
+        # We've resolved src to a
+        # full filename - split it
         # again to get its extension
         srcBase, srcExt = fslimage.splitExt(src)
 
@@ -107,10 +107,10 @@ def imcp(src,
 
     dest = destBase + destExt
 
-    # Give up if we don't have permission. 
+    # Give up if we don't have permission.
     if          not os.access(op.dirname(dest), os.W_OK | os.X_OK):
         raise fslpath.PathError('imcp error - cannot write to {}'.format(dest))
-    
+
     if move and not os.access(op.dirname(src),  os.W_OK | os.X_OK):
         raise fslpath.PathError('imcp error - cannot move from {}'.format(src))
 
@@ -118,7 +118,7 @@ def imcp(src,
     # match the destination file type,
     # we need to perform a conversion.
     #
-    # This is more expensive in terms of 
+    # This is more expensive in terms of
     # io and cpu, but programmatically
     # very easy - nibabel does all the
     # hard work.
@@ -127,7 +127,7 @@ def imcp(src,
         if not overwrite and op.exists(dest):
             raise fslpath.PathError('imcp error - destination already '
                                     'exists ({})'.format(dest))
-         
+
         img = nib.load(src)
         nib.save(img, dest)
 
@@ -140,7 +140,7 @@ def imcp(src,
     # is actually more complicated than
     # converting the file type due to
     # hdr/img pairs ...
-    # 
+    #
     # If the source is part of a file group,
     # e.g. src.img/src.hdr), we need to copy
     # the whole set of files. So here we
@@ -175,12 +175,12 @@ def imcp(src,
     copyDests = [destBase + e for (b, e) in copySrcs]
     copySrcs  = [b        + e for (b, e) in copySrcs]
 
-    # Fail if any of the destination 
+    # Fail if any of the destination
     # paths already exist
     if not overwrite and any([op.exists(d) for d in copyDests]):
         raise fslpath.PathError('imcp error - a destination path already '
                                 'exists ({})'.format(', '.join(copyDests)))
- 
+
     # Do the copy/move
     for src, dest in zip(copySrcs, copyDests):
         if move: shutil.move(src, dest)
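
A hedged sketch of how the ``imcp`` function above might be called; the
paths are hypothetical::

    from fsl.utils.imcp import imcp

    # Copy an image, letting imcp resolve the missing extension (e.g.
    # .nii.gz, or an .img/.hdr pair) and convert the copy to the
    # default file type if necessary.
    imcp('/data/subject1/T1',         # src - extension may be omitted
         '/data/subject1/backup/',    # dest may be a directory
         overwrite=True,
         useDefaultExt=True)
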
diff --git a/fsl/utils/memoize.py b/fsl/utils/memoize.py
index 6fe330ebb..1a95638c8 100644
--- a/fsl/utils/memoize.py
+++ b/fsl/utils/memoize.py
@@ -51,7 +51,7 @@ def memoize(func):
             key = [defaultKey]
 
         key = tuple(key)
-        
+
         try:
             result = cache[key]
 
@@ -123,15 +123,15 @@ def skipUnchanged(func):
     """
 
     import numpy as np
-    
+
     cache = {}
-    
+
     def wrapper(name, value, *args, **kwargs):
 
         oldVal = cache.get(name, None)
 
         if oldVal is not None:
-            
+
             oldIsArray = isinstance(oldVal, np.ndarray)
             newIsArray = isinstance(value,  np.ndarray)
             isarray    = oldIsArray or newIsArray
@@ -140,7 +140,7 @@ def skipUnchanged(func):
             else:       nochange =        oldVal == value
 
             if nochange:
-                return False 
+                return False
 
         func(name, value, *args, **kwargs)
 
@@ -165,16 +165,16 @@ class Instanceify(object):
             def set(self, name, value):
                 self.__items[name] = value
 
-    
+
     Given this definition, a single :func:`skipUnchanged` decorator will be
     created and shared amongst all ``Container`` instances. This is not ideal,
     as the value cache created by the :func:`skipUnchanged` decorator should
     be associated with a single ``Container`` instance.
 
-    
+
     By redefining the ``Container`` class definition like so::
 
-    
+
         class Container(object):
 
             def __init__(self):
@@ -188,13 +188,13 @@ class Instanceify(object):
     a separate :func:`skipUnchanged` decorator is created for, and associated
     with, every ``Container`` instance.
 
-    
+
     This is achieved because an ``Instanceify`` instance is a descriptor. When
     first accessed as an instance attribute, an ``Instanceify`` instance will
     create the real decorator function, and replace itself on the instance.
     """
 
-    
+
     def __init__(self, realDecorator):
         """Create an ``Instanceify`` decorator.
 
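
The ``Instanceify`` docstring above already sketches the intended pattern;
here is a compact, self-contained version of that example (the ``Container``
class is purely illustrative)::

    from fsl.utils import memoize

    class Container(object):

        def __init__(self):
            self.__items = {}

        # Each Container instance gets its own skipUnchanged value
        # cache, so set() becomes a no-op when the value stored
        # against a name has not changed.
        @memoize.Instanceify(memoize.skipUnchanged)
        def set(self, name, value):
            self.__items[name] = value

    c = Container()
    c.set('threshold', 0.5)
    c.set('threshold', 0.5)   # skipped - the value is unchanged
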
diff --git a/fsl/utils/notifier.py b/fsl/utils/notifier.py
index e3ef34983..02e404486 100644
--- a/fsl/utils/notifier.py
+++ b/fsl/utils/notifier.py
@@ -64,7 +64,7 @@ class _Listener(object):
     def __str__(self):
 
         cb = self.callback
-        
+
         if cb is not None: cbName = getattr(cb, '__name__', '<callable>')
         else:              cbName = '<deleted>'
 
@@ -93,7 +93,7 @@ class Notifier(object):
         """Initialises a dictionary of listeners on a new ``Notifier``
         instance.
         """
-        
+
         new = object.__new__(cls)
 
         # Listeners are stored in this
@@ -113,7 +113,7 @@ class Notifier(object):
 
         return new
 
-        
+
     def register(self, name, callback, topic=None, runOnIdle=False):
         """Register a listener with this ``Notifier``.
 
@@ -148,13 +148,13 @@ class Notifier(object):
 
         if name in self.__listeners[topic]:
             raise Registered('Listener {} is already registered'.format(name))
-        
+
         self.__listeners[topic][name] = listener
         self.__enabled[  topic]       = self.__enabled.get(topic, True)
 
         log.debug('{}: Registered {}'.format(type(self).__name__, listener))
 
-        
+
     def deregister(self, name, topic=None):
         """De-register a listener that has been previously registered with
         this ``Notifier``.
@@ -185,7 +185,7 @@ class Notifier(object):
         if len(listeners) == 0:
             self.__listeners.pop(topic)
             self.__enabled  .pop(topic)
-        
+
         log.debug('{}: De-registered listener {}'.format(
             type(self).__name__, listener))
 
@@ -218,7 +218,7 @@ class Notifier(object):
         """Enable/disable all listeners for the specified topic.
 
         :arg topic: Topic to enable/disable listeners on. If ``None``,
-                    all listeners are enabled/disabled. 
+                    all listeners are enabled/disabled.
 
         :arg state: State to set listeners to.
         """
@@ -230,7 +230,7 @@ class Notifier(object):
             if topic in self.__enabled:
                 self.__enabled[topic] = state
 
-    
+
     def disableAll(self, topic=None):
         """Disable all listeners for the specified topic (or ``None``
         to disable all listeners).
@@ -241,13 +241,13 @@ class Notifier(object):
     def isAllEnabled(self, topic=None):
         """Returns ``True`` if all listeners for the specified topic (or all
         listeners if ``topic=None``) are enabled, ``False`` otherwise.
-        """ 
+        """
         if topic is None:
             topic = DEFAULT_TOPIC
 
         return self.__enabled.get(topic, False)
 
-        
+
     @contextlib.contextmanager
     def skipAll(self, topic=None):
         """Context manager which disables all listeners for the
@@ -267,7 +267,7 @@ class Notifier(object):
 
         try:
             yield
-            
+
         finally:
             for t, s in zip(topics, states):
                 self.enableAll(t, s)
@@ -312,7 +312,7 @@ class Notifier(object):
         finally:
             for topic, state in zip(topics, states):
                 self.enable(name, topic, state)
-        
+
 
     def notify(self, *args, **kwargs):
         """Notify all registered listeners of this ``Notifier``.
@@ -326,7 +326,7 @@ class Notifier(object):
         :arg value: A value passed through to the registered listener
                     functions. If not provided, listeners will be passed
                     a value of ``None``.
-        
+
         All other arguments passed to this method are ignored.
 
         .. note:: Listeners registered with ``runOnIdle=True`` are called
@@ -345,7 +345,7 @@ class Notifier(object):
             stack   = inspect.stack()
             frame   = stack[1]
             srcMod  = '...{}'.format(frame[1][-20:])
-            srcLine = frame[2] 
+            srcLine = frame[2]
 
             log.debug('{}: Notifying {} listeners (topic: {}) [{}:{}]'.format(
                 type(self).__name__,
@@ -355,7 +355,7 @@ class Notifier(object):
                 srcLine))
 
         for listener in listeners:
-                
+
             callback = listener.callback
             name     = listener.name
 
diff --git a/fsl/utils/path.py b/fsl/utils/path.py
index 86fd939c1..01df91f8c 100644
--- a/fsl/utils/path.py
+++ b/fsl/utils/path.py
@@ -57,8 +57,8 @@ def shallowest(path, suffixes):
     """Finds the shallowest directory which ends with one of the given
     sequence of suffixes, or returns ``None`` if no directories end
     with any of the suffixes.
-    """ 
-    
+    """
+
     path = path.strip()
 
     # We've reached the root of the file system
@@ -74,7 +74,7 @@ def shallowest(path, suffixes):
     if any([path.endswith(s) for s in suffixes]):
         return path
 
-    return None 
+    return None
 
 
 def addExt(prefix,
@@ -84,28 +84,28 @@ def addExt(prefix,
            fileGroups=None):
     """Adds a file extension to the given file ``prefix``.
 
-    If ``mustExist`` is False, and the file does not already have a 
+    If ``mustExist`` is False, and the file does not already have a
     supported extension, the default extension is appended and the new
     file name returned. If the prefix already has a supported extension,
     it is returned unchanged.
 
-    If ``mustExist`` is ``True`` (the default), the function checks to see 
-    if any files exist that have the given prefix, and a supported file 
+    If ``mustExist`` is ``True`` (the default), the function checks to see
+    if any files exist that have the given prefix, and a supported file
     extension.  A :exc:`PathError` is raised if:
 
        - No files exist with the given prefix and a supported extension.
-    
+
        - ``fileGroups`` is ``None``, and more than one file exists with the
-         given prefix, and a supported extension. 
+         given prefix, and a supported extension.
 
     Otherwise the full file name is returned.
 
     :arg prefix:      The file name prefix to modify.
 
     :arg allowedExts: List of allowed file extensions.
-    
+
     :arg mustExist:   Whether the file must exist or not.
-    
+
     :arg defaultExt:  Default file extension to use.
 
     :arg fileGroups:  Recognised file groups - see :func:`getFileGroup`.
@@ -135,7 +135,7 @@ def addExt(prefix,
     if len(allowedExts) == 0 or \
        any([prefix.endswith(ext) for ext in allowedExts]):
         allPaths = [prefix]
-        
+
     # Otherwise, make a bunch of file names, one per
     # supported extension, and test to see if exactly
     # one of them exists.
@@ -170,8 +170,8 @@ def addExt(prefix,
             raise PathError('More than one file with '
                             'prefix "{}"'.format(prefix))
 
-        # Otherwise, we return a path 
-        # to the file which matches the 
+        # Otherwise, we return a path
+        # to the file which matches the
         # first suffix in the group.
         groupIdx = groupMatches.index(True)
         allPaths = [prefix + fileGroups[groupIdx][0]]
@@ -197,14 +197,14 @@ def splitExt(filename, allowedExts=None):
     """Returns the base name and the extension from the given file name.
 
     If ``allowedExts`` is ``None``, this function is equivalent to using::
-    
+
         os.path.splitext(filename)
 
     If ``allowedExts`` is provided, but the file does not end with an allowed
     extension, a tuple containing ``(filename, '')`` is returned.
 
     :arg filename:    The file name to split.
-    
+
     :arg allowedExts: Allowed/recognised file extensions.
     """
 
@@ -220,7 +220,7 @@ def splitExt(filename, allowedExts=None):
     if not any(extMatches):
         return filename, ''
 
-    # Otherwise split the filename 
+    # Otherwise split the filename
     # into its base and its extension
     extIdx = extMatches.index(True)
     extLen = len(allowedExts[extIdx])
@@ -233,7 +233,7 @@ def getFileGroup(path,
                  fileGroups=None,
                  fullPaths=True,
                  unambiguous=False):
-    """If the given ``path`` is part of a ``fileGroup``, returns a list 
+    """If the given ``path`` is part of a ``fileGroup``, returns a list
     containing the paths to all other files in the group (including the
     ``path`` itself).
 
@@ -241,7 +241,7 @@ def getFileGroup(path,
     be part of an incomplete file group, a list containing only the ``path``
     is returned.
 
-    If the ``path`` does not exist, or appears to be part of more than one 
+    If the ``path`` does not exist, or appears to be part of more than one
     file group, a :exc:`PathError` is raised.
 
     File groups can be used to specify a collection of file suffixes which
@@ -249,10 +249,10 @@ def getFileGroup(path,
     ambiguity when multiple files exist with the same ``prefix`` and supported
     extensions (e.g. ``file.hdr`` and ``file.img``). The file groups are
     specified as a list of sequences, for example::
-    
+
         [('.img',    '.hdr'),
          ('.img.gz', '.hdr.gz')]
-    
+
     If you specify ``fileGroups=[('.img', '.hdr')]`` and ``prefix='file'``, and
     both ``file.img`` and ``file.hdr`` exist, the :func:`addExt` function would
     return ``file.img`` (i.e. the file which matches the first extension in
@@ -269,18 +269,18 @@ def getFileGroup(path,
               functions are able to figure out what you mean when you specify
               ``file``, and both ``file.hdr`` and ``file.img`` (or
               ``file.hdr.gz`` and ``file.img.gz``) exist.
-    
+
     :arg path:        Path to the file. Must contain the file extension.
-    
+
     :arg allowedExts: Allowed/recognised file extensions.
-    
+
     :arg fileGroups:  Recognised file groups.
-    
+
     :arg fullPaths:   If ``True`` (the default), full file paths (relative to
                       the ``path``) are returned. Otherwise, only the file
                       extensions in the group are returned.
 
-    :arg unambiguous: Defaults to ``False``. If ``True``, and the path 
+    :arg unambiguous: Defaults to ``False``. If ``True``, and the path
                       is not unambiguously part of one group, or part of
                       no groups, a :exc:`PathError` is raised.
                       Otherwise, the path is returned.
@@ -288,7 +288,7 @@ def getFileGroup(path,
 
     path = addExt(path, allowedExts, mustExist=True, fileGroups=fileGroups)
     base, ext = splitExt(path, allowedExts)
- 
+
     if fileGroups is None:
         if fullPaths: return [path]
         else:         return [ext]
@@ -318,8 +318,8 @@ def getFileGroup(path,
         if fullPaths: return [path]
         else:         return [ext]
 
-    # If the given path is part of more 
-    # than one existing file group, we 
+    # If the given path is part of more
+    # than one existing file group, we
     # can't resolve this ambiguity.
     if fullMatches > 1:
         raise PathError('Path is part of multiple '
@@ -345,18 +345,18 @@ def getFileGroup(path,
     elif partialMatches > 0:
         raise PathError('Path is part of an incomplete '
                         'file group: {}'.format(path))
-        
+
     else:
         if fullPaths: return [path]
         else:         return [ext]
 
 
 def removeDuplicates(paths, allowedExts=None, fileGroups=None):
-    """Reduces the list of ``paths`` down to those which are unique with 
+    """Reduces the list of ``paths`` down to those which are unique with
     respect to the specified ``fileGroups``.
 
     For example, if you have a directory containing::
-    
+
         001.hdr
         001.img
         002.hdr
@@ -369,7 +369,7 @@ def removeDuplicates(paths, allowedExts=None, fileGroups=None):
          paths       = ['001.img', '001.hdr',
                         '002.img', '002.hdr',
                         '003.img', '003.hdr']
-    
+
          allowedExts = ['.img',  '.hdr']
          fileGroups  = [('.img', '.hdr')]
 
@@ -384,8 +384,8 @@ def removeDuplicates(paths, allowedExts=None, fileGroups=None):
 
     A :exc:`PathError` will be raised if any of the ``paths`` do not exist,
     or if there are any ambiguities with respect to incomplete paths.
-    
-    :arg paths:       List of paths to reduce. 
+
+    :arg paths:       List of paths to reduce.
 
     :arg allowedExts: Allowed/recognised file extensions.
 
@@ -401,7 +401,7 @@ def removeDuplicates(paths, allowedExts=None, fileGroups=None):
         if len(groupFiles) == 0:
             if path not in unique:
                 unique.append(path)
-                
+
         elif not any([p in unique for p in groupFiles]):
             unique.append(groupFiles[0])
 
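
A sketch of the ``addExt``/``splitExt``/``removeDuplicates`` behaviour
described above, using the ANALYZE-style extensions from the docstring
examples. It assumes the listed files actually exist on disk::

    from fsl.utils import path as fslpath

    exts   = ['.img', '.hdr', '.nii', '.nii.gz']
    groups = [('.img', '.hdr'), ('.img.gz', '.hdr.gz')]

    # Resolve 'image' to 'image.img' when both image.img and image.hdr
    # exist - without fileGroups this prefix would be ambiguous.
    full = fslpath.addExt('image', allowedExts=exts,
                          mustExist=True, fileGroups=groups)

    base, ext = fslpath.splitExt(full, allowedExts=exts)

    # Collapse each img/hdr pair down to a single path.
    unique = fslpath.removeDuplicates(
        ['001.img', '001.hdr', '002.img', '002.hdr'],
        allowedExts=exts, fileGroups=groups)
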
diff --git a/fsl/utils/platform.py b/fsl/utils/platform.py
index beadc1f5c..71cdd8294 100644
--- a/fsl/utils/platform.py
+++ b/fsl/utils/platform.py
@@ -57,7 +57,7 @@ we are running standard wx Python.
 
 
 WX_PHOENIX = 2
-"""Identifier for the :attr:`Platform.wxFlavour` property, indicating that we 
+"""Identifier for the :attr:`Platform.wxFlavour` property, indicating that we
 are running wx Python/Phoenix.
 """
 
@@ -90,19 +90,19 @@ def isWidgetAlive(widget):
 
     if platform.wxFlavour == WX_PHOENIX:
         return bool(widget)
-    
+
     elif platform.wxFlavour == WX_PYTHON:
         try:
             # GetId seems to be available on all wx
             # objects, despite not being documented.
-            # 
+            #
             # I was originally calling IsEnabled,
             # but this causes segfaults if called
             # on a wx.MenuItem from within an
             # event handler on that menu item!
             widget.GetId()
             return True
-        
+
         except wx.PyDeadObjectError:
             return False
 
@@ -131,7 +131,7 @@ class Platform(notifier.Notifier):
        glIsSoftwareRenderer
     """
 
-    
+
     def __init__(self):
         """Create a ``Platform`` instance. """
 
@@ -175,7 +175,7 @@ class Platform(notifier.Notifier):
 
         self.__inSSHSession = inSSH and not inVNC
 
-                
+
     @property
     def os(self):
         """The operating system name. Whatever is returned by the built-in
@@ -183,7 +183,7 @@ class Platform(notifier.Notifier):
         """
         return builtin_platform.system()
 
-    
+
     @property
     def frozen(self):
         """``True`` if we are running in a compiled/frozen application,
@@ -201,7 +201,7 @@ class Platform(notifier.Notifier):
             return (self.canHaveGui and
                     app is not None and
                     app.IsMainLoopRunning())
-        
+
         except ImportError:
             return False
 
@@ -219,13 +219,13 @@ class Platform(notifier.Notifier):
         """
         return self.__inSSHSession
 
-    
+
     @property
     def wxPlatform(self):
         """One of :data:`WX_UNKNOWN`, :data:`WX_MAC_COCOA`,
         :data:`WX_MAC_CARBON`, or :data:`WX_GTK`, indicating the wx platform.
         """
-        
+
         if not self.haveGui:
             return WX_UNKNOWN
 
@@ -243,10 +243,10 @@ class Platform(notifier.Notifier):
             if platform is WX_UNKNOWN:
                 log.warning('Could not determine wx platform from '
                             'information: {}'.format(pi))
- 
+
         return platform
 
-    
+
     @property
     def wxFlavour(self):
         """One of :data:`WX_UNKNOWN`, :data:`WX_PYTHON` or :data:`WX_PHOENIX`,
@@ -262,7 +262,7 @@ class Platform(notifier.Notifier):
         isPhoenix = False
 
         for tag in pi:
-            if 'phoenix' in tag: 
+            if 'phoenix' in tag:
                 isPhoenix = True
                 break
 
@@ -294,7 +294,7 @@ class Platform(notifier.Notifier):
         elif value == '':          value = None
         elif not op.exists(value): value = None
         elif not op.isdir(value):  value = None
-            
+
         self.__fsldir = value
 
         if value is not None:
@@ -317,7 +317,7 @@ class Platform(notifier.Notifier):
         """
         return self.__fslVersion
 
-        
+
     @property
     def glVersion(self):
         """Returns the available OpenGL version, or ``None`` if it has not
@@ -325,7 +325,7 @@ class Platform(notifier.Notifier):
         """
         return self.__glVersion
 
-    
+
     @glVersion.setter
     def glVersion(self, value):
         """Set the available OpenGL version. """
diff --git a/fsl/utils/settings.py b/fsl/utils/settings.py
index 525f1c4c5..5fd3207c8 100644
--- a/fsl/utils/settings.py
+++ b/fsl/utils/settings.py
@@ -62,7 +62,7 @@ log = logging.getLogger(__name__)
 
 
 _CONFIG_ID = 'fslpy'
-"""The default configuration identifier, used as the directory name for 
+"""The default configuration identifier, used as the directory name for
 storing configuration files.
 """
 
@@ -72,7 +72,7 @@ def initialise(*args, **kwargs):
     :class:`Settings` instance, and enables the module-level
     functions. All settings are passed through to :meth:`Settings.__init__`.
     """
-    
+
     mod = sys.modules[__name__]
 
     settings       = Settings(*args, **kwargs)
@@ -104,7 +104,7 @@ def writeFile(*args, **kwargs):
 def deleteFile(*args, **kwargs):
     pass
 def filePath(*args, **kwargs):
-    pass 
+    pass
 def readAll(*args, **kwargs):
     return {}
 def listFiles(*args, **kwargs):
@@ -126,7 +126,7 @@ class Settings(object):
         :arg cfgid:       Configuration ID, used as the name of the
                           configuration directory.
 
-        :arg cfgdir:      Store configuration settings in this directory, 
+        :arg cfgdir:      Store configuration settings in this directory,
                           instead of the default.
 
         :arg writeOnExit: If ``True`` (the default), an ``atexit`` function
@@ -159,7 +159,7 @@ class Settings(object):
     def read(self, name, default=None):
         """Reads a setting with the given ``name``, return ``default`` if
         there is no setting called ``name``.
-        """ 
+        """
 
         log.debug('Reading {}/{}'.format(self.__configID, name))
         return self.__config.get(name, default)
@@ -180,7 +180,7 @@ class Settings(object):
 
 
     def readFile(self, path, mode='t'):
-        """Reads and returns the contents of the given file ``path``. 
+        """Reads and returns the contents of the given file ``path``.
         Returns ``None`` if the path does not exist.
 
         :arg mode: ``'t'`` for text mode, or ``'b'`` for binary.
@@ -320,7 +320,7 @@ class Settings(object):
         cfgdir  = None
         homedir = op.expanduser('~')
 
-        # On linux, if $XDG_CONFIG_HOME is set, use $XDG_CONFIG_HOME/fslpy/ 
+        # On linux, if $XDG_CONFIG_HOME is set, use $XDG_CONFIG_HOME/fslpy/
         # Otherwise, use $HOME/.config/fslpy/
         #
         # https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
@@ -348,7 +348,7 @@ class Settings(object):
                     exc_info=True)
                 cfgdir = None
 
-        # If dir creation failed, use a temporary 
+        # If dir creation failed, use a temporary
         # directory, and delete it on exit
         if cfgdir is None:
             cfgdir = tempfile.mkdtemp()
@@ -368,7 +368,7 @@ class Settings(object):
 
         log.debug('Reading {} configuration from: {}'.format(
             self.__configID, configFile))
-        
+
         try:
             with open(configFile, 'rb') as f:
                 return pickle.load(f)
@@ -380,14 +380,14 @@ class Settings(object):
 
 
     def writeConfigFile(self):
-        """Writes all settings to a file.""" 
+        """Writes all settings to a file."""
 
         config     = self.__config
         configFile = op.join(self.__configDir, 'config.pkl')
 
         log.debug('Writing {} configuration to: {}'.format(
-            self.__configID, configFile)) 
-        
+            self.__configID, configFile))
+
         try:
             with open(configFile, 'wb') as f:
                 pickle.dump(config, f)
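
A sketch of the ``Settings`` class configured above. The constructor keywords
are taken from its docstring; a ``write`` counterpart to the ``read`` method
shown is an assumption, and the setting name is made up::

    from fsl.utils.settings import Settings

    # Settings are stored under a hypothetical configuration ID, and
    # pickled to config.pkl on exit (writeOnExit defaults to True).
    s = Settings(cfgid='example')

    s.write('lastDir', '/data/subject1')   # assumed counterpart to read()
    print(s.read('lastDir', default='~'))
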
diff --git a/fsl/utils/transform.py b/fsl/utils/transform.py
index 8713afcbd..660148d00 100644
--- a/fsl/utils/transform.py
+++ b/fsl/utils/transform.py
@@ -48,7 +48,7 @@ def scaleOffsetXform(scales, offsets):
     """Creates and returns an affine transformation matrix which encodes
     the specified scale(s) and offset(s).
 
-    
+
     :arg scales:  A tuple of up to three values specifying the scale factors
                   for each dimension. If less than length 3, it is padded with
                   ``1.0``.
@@ -87,11 +87,11 @@ def compose(scales, offsets, rotations, origin=None):
     and axis rotations.
 
     :arg scales:    Sequence of three scale values.
-    
+
     :arg offsets:   Sequence of three offset values.
-    
+
     :arg rotations: Sequence of three rotation values, in radians.
-    
+
     :arg origin:    Origin of rotation - must be scaled by the ``scales``.
                     If not provided, the rotation origin is ``(0, 0, 0)``.
     """
@@ -104,12 +104,12 @@ def compose(scales, offsets, rotations, origin=None):
         preRotate[ 2, 3] = -origin[2]
         postRotate[0, 3] =  origin[0]
         postRotate[1, 3] =  origin[1]
-        postRotate[2, 3] =  origin[2] 
+        postRotate[2, 3] =  origin[2]
 
     scale  = np.eye(4, dtype=np.float64)
     offset = np.eye(4, dtype=np.float64)
     rotate = np.eye(4, dtype=np.float64)
-    
+
     scale[  0,  0] = scales[ 0]
     scale[  1,  1] = scales[ 1]
     scale[  2,  2] = scales[ 2]
@@ -139,10 +139,10 @@ def decompose(xform):
                 - A sequence of three translations
                 - A sequence of three rotations, in radians
     """
- 
+
     # The inline comments in the code below are taken verbatim from
     # the referenced article, [except for notes in square brackets].
-    
+
     # The next step is to extract the translations. This is trivial;
     # we find t_x = M_{4,1}, t_y = M_{4,2}, and t_z = M_{4,3}. At this
     # point we are left with a 3*3 matrix M' = M_{1..3,1..3}.
@@ -155,7 +155,7 @@ def decompose(xform):
     M3 = xform[2]
 
     # The process of finding the scaling factors and shear parameters
-    # is interleaved. First, find s_x = |M'_1|. 
+    # is interleaved. First, find s_x = |M'_1|.
     sx = np.sqrt(np.dot(M1, M1))
 
     # Then, compute an initial value for the xy shear factor,
@@ -191,7 +191,7 @@ def decompose(xform):
     sxz = sxz / sz
     syz = syz / sz
 
-    # The resulting matrix now is a pure rotation matrix, except that it 
+    # The resulting matrix now is a pure rotation matrix, except that it
     # might still include a scale factor of -1. If the determinant of the
     # matrix is -1, negate the matrix and all three scaling factors. Call
     # the resulting matrix R.
@@ -225,7 +225,7 @@ def rotMatToAxisAngles(rotmat):
     else:
         xrot = np.arctan2( rotmat[2, 1], rotmat[2, 2])
         yrot = np.arctan2(-rotmat[2, 0], yrot)
-        zrot = np.arctan2( rotmat[1, 0], rotmat[0, 0]) 
+        zrot = np.arctan2( rotmat[1, 0], rotmat[0, 0])
 
     return [xrot, yrot, zrot]
 
@@ -233,12 +233,12 @@ def rotMatToAxisAngles(rotmat):
 def axisAnglesToRotMat(xrot, yrot, zrot):
     """Constructs a ``(3, 3)`` rotation matrix from the given angles, which
     must be specified in radians.
-    """ 
+    """
 
     xmat = np.eye(3)
     ymat = np.eye(3)
     zmat = np.eye(3)
-    
+
     xmat[1, 1] =  np.cos(xrot)
     xmat[1, 2] = -np.sin(xrot)
     xmat[2, 1] =  np.sin(xrot)
@@ -265,13 +265,13 @@ def axisBounds(shape,
                offset=1e-4):
     """Returns the ``(lo, hi)`` bounds of the specified axis/axes in the
     world coordinate system defined by ``xform``.
-    
+
     If the ``origin`` parameter is set to  ``centre`` (the default),
     this function assumes that voxel indices correspond to the voxel
     centre. For example, the voxel at ``(4, 5, 6)`` covers the space:
-    
+
       ``[3.5 - 4.5, 4.5 - 5.5, 5.5 - 6.5]``
-    
+
     So the bounds of the specified shape extend from the corner at
 
       ``(-0.5, -0.5, -0.5)``
@@ -283,9 +283,9 @@ def axisBounds(shape,
     If the ``origin`` parameter is set to ``corner``, this function
     assumes that voxel indices correspond to the voxel corner. In this
     case, a voxel at ``(4, 5, 6)`` covers the space:
-    
+
       ``[4 - 5, 5 - 6, 6 - 7]``
-    
+
     So the bounds of the specified shape extend from the corner at
 
       ``(0, 0, 0)``
@@ -302,7 +302,7 @@ def axisBounds(shape,
     amount.  The ``boundary`` parameter can also be set to ``'both'``, or
     ``None``. This option is provided so that you can ensure that the
     resulting bounds will always be contained within the image space.
-    
+
     :arg shape:    The ``(x, y, z)`` shape of the data.
 
     :arg xform:    Transformation matrix which transforms voxel coordinates
@@ -313,7 +313,7 @@ def axisBounds(shape,
     :arg origin:   Either ``'centre'`` (the default) or ``'corner'``.
 
     :arg boundary: Either ``'high'`` (the default), ``'low'``, ``'both'``,
-                   or ``None``. 
+                   or ``None``.
 
     :arg offset:   Amount by which the boundary voxel coordinates should be
                    offset. Defaults to ``1e-4``.
@@ -331,17 +331,17 @@ def axisBounds(shape,
     if origin not in ('centre', 'corner'):
         raise ValueError('Invalid origin value: {}'.format(origin))
     if boundary not in ('low', 'high', 'both', None):
-        raise ValueError('Invalid boundary value: {}'.format(boundary)) 
+        raise ValueError('Invalid boundary value: {}'.format(boundary))
 
     scalar = False
 
     if axes is None:
         axes = [0, 1, 2]
-        
+
     elif not isinstance(axes, collections.Iterable):
         scalar = True
         axes   = [axes]
-    
+
     x, y, z = shape[:3]
 
     points = np.zeros((8, 3), dtype=np.float32)
@@ -362,7 +362,7 @@ def axisBounds(shape,
         x0 += offset
         y0 += offset
         z0 += offset
-        
+
     if boundary in ('high', 'both'):
         x  -= offset
         y  -= offset
@@ -385,12 +385,12 @@ def axisBounds(shape,
     if scalar: return (lo[0], hi[0])
     else:      return (lo,    hi)
 
-        
+
 def transform(p, xform, axes=None):
     """Transforms the given set of points ``p`` according to the given affine
-    transformation ``xform``. 
+    transformation ``xform``.
+
 
-    
     :arg p:     A sequence or array of points of shape :math:`N \\times  3`.
 
     :arg xform: An affine transformation matrix with which to transform the
@@ -432,7 +432,7 @@ def _fillPoints(p, axes):
     """
 
     if not isinstance(p, collections.Iterable): p = [p]
-    
+
     p = np.array(p)
 
     if axes is None: return p
@@ -480,7 +480,7 @@ def flirtMatrixToSform(flirtMat, srcImage, refImage):
     :arg srcImage: Source :class:`.Image`
     :arg refImage: Reference :class:`.Image`
     """
-    
+
     srcScaledVoxelMat    = srcImage.voxelsToScaledVoxels()
     refScaledVoxelMat    = refImage.voxelsToScaledVoxels()
     refVoxToWorldMat     = refImage.voxToWorldMat
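
A short numeric sketch of the ``compose``/``decompose``/``transform`` round
trip described above (angles in radians, per the docstrings)::

    import numpy as np

    from fsl.utils import transform

    # Build a 4x4 affine from scales, offsets and rotations ...
    xform = transform.compose([2, 2, 2],
                              [10, 20, 30],
                              [0, 0, np.pi / 2])

    # ... and recover them again; the return order follows the
    # decompose docstring (scales, translations, rotations).
    scales, translations, rotations = transform.decompose(xform)

    # Apply the affine to an (N, 3) array of points.
    pts  = np.array([[0, 0, 0], [1, 1, 1]], dtype=np.float64)
    tpts = transform.transform(pts, xform)
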
diff --git a/fsl/utils/weakfuncref.py b/fsl/utils/weakfuncref.py
index 8cc4b9181..71350145e 100644
--- a/fsl/utils/weakfuncref.py
+++ b/fsl/utils/weakfuncref.py
@@ -21,7 +21,7 @@ class WeakFunctionRef(object):
     or attribute changes.
     """
 
-    
+
     def __init__(self, func):
         """Create a new ``WeakFunctionRef`` to encapsulate the given
         function or bound/unbound method.
@@ -46,7 +46,7 @@ class WeakFunctionRef(object):
 
         # Unbound/class method or function
         else:
- 
+
             self.obj      = None
             self.objType  = None
             self.func     = weakref.ref(func)
@@ -67,20 +67,20 @@ class WeakFunctionRef(object):
         if func is None: return '{} <dead>'.format(s)
         else:            return s
 
-        
+
     def __repr__(self):
         """Return a string representation of the function."""
         return self.__str__()
 
-    
+
     def __isMethod(self, func):
         """Returns ``True`` if the given function is a bound method,
         ``False`` otherwise.
-        
+
         This seems to be one of the few areas where python 2 and 3 are
         irreconcilably incompatible (or just where :mod:`six` does not have a
         function to help us).
-        
+
         In Python 3 there is no difference between an unbound method and a
         function. But in Python 2, an unbound method is still a method (and
         inspect.ismethod returns True).
@@ -101,7 +101,7 @@ class WeakFunctionRef(object):
             ismethod = inspect.ismethod(func)
 
         return ismethod
-    
+
 
     def __findPrivateMethod(self):
         """Finds and returns the bound method associated with the encapsulated
@@ -131,7 +131,7 @@ class WeakFunctionRef(object):
 
         return None
 
-    
+
     def function(self):
         """Return a reference to the encapsulated function or method,
         or ``None`` if the function has been garbage collected.
@@ -152,5 +152,5 @@ class WeakFunctionRef(object):
 
         # If the function is a bound private method,
         # its name on the instance will have been
-        # mangled, so we need to search for it 
+        # mangled, so we need to search for it
         except: return self.__findPrivateMethod()
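
Finally, a sketch of the ``WeakFunctionRef`` behaviour described above::

    import gc

    from fsl.utils.weakfuncref import WeakFunctionRef

    class Listener(object):
        def callback(self):
            print('called')

    obj = Listener()
    ref = WeakFunctionRef(obj.callback)

    # function() returns the bound method while the underlying
    # instance is still alive ...
    ref.function()()

    # ... and returns None once it has been garbage collected.
    del obj
    gc.collect()
    assert ref.function() is None
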
-- 
GitLab