diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index ae0265c539277cfb1ea01db8b3a2d28ae45dd5e1..0e30f366c23da2d3cd47669a2ffa02ad1fc96151 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,33 @@ This document contains the ``fslpy`` release history in reverse chronological
 order.
 
 
+
+1.10.0 (Under development)
+--------------------------
+
+
+Added
+^^^^^
+
+
+* A new script, :mod:`.extract_noise`, which can be used to extract ICA
+  component time courses from a MELODIC ICA analysis.
+* New :func:`.path.allFiles` function which returns all files underneath a
+  directory.
+* The :func:`.fileOrImage` and :func:`.fileOrArray` decorators now support
+  loading of files which are specified with an output basename.
+
+
+Changed
+^^^^^^^
+
+
+* When using the :func:`.run.run` function, the command output/error streams
+  are now forwarded immediately.
+* Removed dependency on ``pytest-runner``.
+
+
+
 1.9.0 (Monday June 4th 2018)
 ----------------------------
 
diff --git a/doc/contributing.rst b/doc/contributing.rst
index 251f98c2d5a4db188f9bb8e4c8dd424512f60840..4e76856b7505fe98a204090b96afa0dcb6cd825a 100644
--- a/doc/contributing.rst
+++ b/doc/contributing.rst
@@ -32,17 +32,17 @@ To aid readability, all commit messages should be prefixed with one or more of
 the following labels (this convention has been inherited from `nibabel
 <https://github.com/nipy/nibabel>`_):
 
-  * *BF* : bug fix
-  * *RF* : refactoring
-  * *NF* : new feature
-  * *BW* : addresses backward-compatibility
+  * *BF*  : bug fix
+  * *RF*  : refactoring
+  * *NF*  : new feature
+  * *BW*  : addresses backward-compatibility
   * *OPT* : optimization
-  * *BK* : breaks something and/or tests fail
-  * *PL* : making pylint happier
-  * *DOC*: for all kinds of documentation related commits
+  * *BK*  : breaks something and/or tests fail
+  * *PL*  : making pylint happier
+  * *DOC* : for all kinds of documentation related commits
   * *TEST*: for adding or changing tests
-  * *MAINT*: for administrative/maintenance changes
-  * *CI*: for continuous-integration changes
+  * *MNT* : for administrative/maintenance changes
+  * *CI*  : for continuous-integration changes
 
 
 Version number
diff --git a/fsl/data/fixlabels.py b/fsl/data/fixlabels.py
index d5814d79b2b2f41bc2441eabe0bcf261d40ce06b..8974cc79c9c8971649814904e11fe7f6c0f32cb4 100644
--- a/fsl/data/fixlabels.py
+++ b/fsl/data/fixlabels.py
@@ -19,7 +19,10 @@
 import os.path as op
 
 
-def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
+def loadLabelFile(filename,
+                  includeLabel=None,
+                  excludeLabel=None,
+                  returnIndices=False):
     """Loads component labels from the specified file. The file is assuemd
     to be of the format generated by FIX, Melview or ICA-AROMA; such a file
     should have a structure resembling the following::
@@ -70,23 +73,30 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
     *bad* components, i.e. those components which are not classified as
     signal or unknown.
 
-    :arg filename:     Name of the label file to load.
-
-    :arg includeLabel: If the file contains a single line containing a list
-                       component indices, this label will be used for the
-                       components in the list. Defaults to 'Unclassified
-                       noise' for FIX-like files, and 'Movement' for
-                       ICA-AROMA-like files.
-
-    :arg excludeLabel: If the file contains a single line containing component
-                       indices, this label will be used for the components
-                       that are not in the list.  Defaults to 'Signal' for
-                       FIX-like files, and 'Unknown' for ICA-AROMA-like files.
-
-    :returns: A tuple containing the path to the melodic directory
-              as specified in the label file, and a list of lists, one
-              list per component, with each list containing the labels for
-              the corresponding component.
+    :arg filename:      Name of the label file to load.
+
+    :arg includeLabel:  If the file contains a single line containing a list
+                        of component indices, this label will be used for the
+                        components in the list. Defaults to 'Unclassified
+                        noise' for FIX-like files, and 'Movement' for
+                        ICA-AROMA-like files.
+
+    :arg excludeLabel:  If the file contains a single line containing component
+                        indices, this label will be used for the components
+                        that are not in the list.  Defaults to 'Signal' for
+                        FIX-like files, and 'Unknown' for ICA-AROMA-like files.
+
+    :arg returnIndices: Defaults to ``False``. If ``True``, a list containing
+                        the noisy component numbers that were listed in the
+                        file is returned.
+
+    :returns: A tuple containing:
+                - The path to the melodic directory as specified in the label
+                  file
+                - A list of lists, one list per component, with each list
+                  containing the labels for the corresponding component.
+                - If ``returnIndices`` is ``True``, a list of the noisy component
+                  indices (starting from 1) that were specified in the file.
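+
+    For example (a minimal sketch - the label file name is illustrative)::
+
+        melDir, labels, noisyComps = loadLabelFile(
+            'labels.txt', returnIndices=True)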
     """
 
     signalLabels = None
@@ -176,7 +186,7 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
             try:
                 compIdx = int(tokens[0])
 
-            except:
+            except ValueError:
                 raise InvalidLabelFileError(
                     'Invalid FIX classification file - '
                     'line {}: {}'.format(i + 1, compLine))
@@ -203,7 +213,6 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
         noise = isNoisyComponent(labels, signalLabels)
 
         if noise and (comp not in noisyComps):
-            print(signalLabels)
             raise InvalidLabelFileError('Noisy component {} has invalid '
                                         'labels: {}'.format(comp, labels))
 
@@ -217,7 +226,8 @@ def loadLabelFile(filename, includeLabel=None, excludeLabel=None):
             raise InvalidLabelFileError('Noisy component {} is missing '
                                         'a noise label'.format(comp))
 
-    return melDir, allLabels
+    if returnIndices: return melDir, allLabels, noisyComps
+    else:             return melDir, allLabels
 
 
 def saveLabelFile(allLabels,
diff --git a/fsl/scripts/extract_noise.py b/fsl/scripts/extract_noise.py
new file mode 100644
index 0000000000000000000000000000000000000000..087aceb813d8d8459786d03dca15f254b4758844
--- /dev/null
+++ b/fsl/scripts/extract_noise.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+#
+# extract_noise.py - Extract ICA component time courses from a MELODIC
+#                    directory.
+#
+# Author: Paul McCarthy <pauldmccarthy@gmail.com>
+#
+"""This module defines the ``extract_noise`` script, for extracting component
+time series from a MELODIC ``.ica`` directory.
+"""
+
+
+from __future__ import print_function
+
+import os.path as op
+import            sys
+import            argparse
+import            warnings
+
+import numpy   as np
+
+# See atlasq.py for explanation
+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=FutureWarning)
+
+    import fsl.data.fixlabels       as fixlabels
+    import fsl.data.melodicanalysis as melanalysis
+
+
+DTYPE = np.float64
+name  = "extract_noise"
+desc  = 'Extract component time series from a MELODIC .ica directory'
+usage = """
+{name}: {desc}
+Usage:
+  {name} <.ica directory> [-o outfile] <fixfile>
+  {name} <.ica directory> [-o outfile] <component> [<component> ...]
+  {name} <.ica directory> [-o outfile] [-c conffile] [-c conffile] <fixfile>
+  {name} <.ica directory> [-o outfile] [-c conffile] [-c conffile] <component> [<component> ...]
+""".format(name=name, desc=desc).strip()  # noqa
+
+
+helps = {
+    'outfile' :
+    'File to save time series to',
+
+    'overwrite' :
+    'Overwrite output file if it exists',
+
+    'icadir' :
+    '.ica directory to extract time series from.',
+
+    'component' :
+    'Component number or FIX/AROMA file specifying components to extract.',
+
+    'confound' :
+    'Extra files to append to output file.',
+}
+
+
+def parseArgs(args):
+    """Parses command line arguments.
+
+    :arg args: Sequence of command line arguments.
+    :returns:  An ``argparse.Namespace`` object containing parsed arguments.
+    """
+
+    if len(args) == 0:
+        print(usage)
+        sys.exit(0)
+
+    parser = argparse.ArgumentParser(prog=name,
+                                     usage=usage,
+                                     description=desc)
+
+    parser.add_argument('-o', '--outfile',
+                        help=helps['outfile'],
+                        default='confound_timeseries.txt')
+    parser.add_argument('-ow', '--overwrite',
+                        action='store_true',
+                        help=helps['overwrite'])
+    parser.add_argument('-c', '--conffile',
+                        action='append',
+                        help=helps['confound'])
+    parser.add_argument('icadir',
+                        help=helps['icadir'])
+    parser.add_argument('components',
+                        nargs='+',
+                        help=helps['component'])
+
+    args = parser.parse_args(args)
+
+    # Error if ica directory does not exist
+    if not op.exists(args.icadir):
+        print('ICA directory {} does not exist'.format(args.icadir))
+        sys.exit(1)
+
+    # Error if output exists, but overwrite not specified
+    if op.exists(args.outfile) and not args.overwrite:
+        print('Output file {} already exists and --overwrite not '
+              'specified'.format(args.outfile))
+        sys.exit(1)
+
+    # Convert components into integers,
+    # or absolute file paths, and error
+    # if any are not one of these.
+    for i, c in enumerate(args.components):
+        if op.exists(c):
+            args.components[i] = op.abspath(c)
+        else:
+            try:
+                args.components[i] = int(c)
+            except ValueError:
+                print('Bad component: {}. Components must either be component '
+                      'indices (starting from 1), or paths to FIX/AROMA '
+                      'files.'.format(c))
+                sys.exit(1)
+
+    # Convert confound files to absolute
+    # paths, error if any do not exist.
+    if args.conffile is None:
+        args.conffile = []
+    for i, cf in enumerate(args.conffile):
+        if not op.exists(cf):
+            print('Confound file does not exist: {}'.format(cf))
+            sys.exit(1)
+        args.conffile[i] = op.abspath(cf)
+
+    args.outfile = op.abspath(args.outfile)
+    args.icadir  = op.abspath(args.icadir)
+
+    return args
+
+
+def genComponentIndexList(comps, ncomps):
+    """Turns the given sequence of integers and file paths into a list
+    of 0-based component indices.
+
+    :arg comps:  Sequence containing 1-based component indices, and/or paths
+                 to FIX/AROMA label text files.
+
+    :arg ncomps: Number of components in the input data - indices larger than
+                 this will be ignored.
+
+    :returns:    List of 0-based component indices.
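+
+    For example, ``genComponentIndexList([1, 4, 6], 10)`` would return
+    ``[0, 3, 5]``.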
+    """
+
+    allcomps = []
+
+    for c in comps:
+        if isinstance(c, int):
+            ccomps = [c]
+        else:
+            ccomps = fixlabels.loadLabelFile(c, returnIndices=True)[2]
+
+        allcomps.extend([c - 1 for c in ccomps])
+
+    if any([c < 0 or c >= ncomps for c in allcomps]):
+        raise ValueError('Invalid component indices: {}'.format(allcomps))
+
+    return list(sorted(set(allcomps)))
+
+
+def loadConfoundFiles(conffiles, npts):
+    """Loads the given confound files, and copies them all into a single 2D
+    ``(npoints, nconfounds)`` matrix.
+
+    :arg conffiles: Sequence of paths to files containing confound time series
+                    (where each row corresponds to a time point, and each
+                    column corresponds to a single confound).
+
+    :arg npts:      Expected number of time points
+
+    :returns:       A ``(npoints, nconfounds)`` ``numpy`` matrix.
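+
+    For example, two confound files containing five and two columns
+    respectively would produce a ``(npts, 7)`` matrix.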
+    """
+
+    matrices = []
+
+    for cfile in conffiles:
+
+        mat = np.loadtxt(cfile, dtype=DTYPE)
+
+        if len(mat.shape) == 1:
+            mat = np.atleast_2d(mat).T
+
+        if mat.shape[0] != npts:
+            raise ValueError('Confound file {} does not have correct number '
+                             'of points (expected {}, has {})'.format(
+                                 cfile, npts, mat.shape[0]))
+
+        matrices.append(mat)
+
+    ncols     = sum([m.shape[1] for m in matrices])
+    confounds = np.zeros((npts, ncols), dtype=DTYPE)
+
+    coli = 0
+
+    for mat in matrices:
+        matcols                           = mat.shape[1]
+        confounds[:, coli:coli + matcols] = mat
+        coli                              = coli + matcols
+
+    return confounds
+
+
+def main(argv=None):
+    """Entry point for the ``extract_noise`` script.
+
+    Identifies component time series to extract, extracts them, loads extra
+    confound files, and saves them out to a file.
+    """
+
+    if argv is None:
+        argv = sys.argv[1:]
+
+    args = parseArgs(argv)
+
+    try:
+        ts           = melanalysis.getComponentTimeSeries(args.icadir)
+        npts, ncomps = ts.shape
+        confs        = loadConfoundFiles(args.conffile, npts)
+        comps        = genComponentIndexList(args.components, ncomps)
+        ts           = ts[:, comps]
+
+    except Exception as e:
+        print(e)
+        sys.exit(1)
+
+    ts = np.hstack((ts, confs))
+    np.savetxt(args.outfile, ts, fmt='%10.5f')
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/fsl/utils/path.py b/fsl/utils/path.py
index 76efc7760f03348b2b3d96c8d21f7f3c7d09930f..fc8efeadb4426a245a72ffe60a63cb6264784d8d 100644
--- a/fsl/utils/path.py
+++ b/fsl/utils/path.py
@@ -13,6 +13,7 @@ paths.
 
    deepest
    shallowest
+   allFiles
    hasExt
    addExt
    removeExt
@@ -24,8 +25,9 @@ paths.
 """
 
 
-import            glob
 import os.path as op
+import            os
+import            glob
 
 
 class PathError(Exception):
@@ -78,6 +80,20 @@ def shallowest(path, suffixes):
     return None
 
 
+def allFiles(root):
+    """Return a list containing all files which exist underneath the specified
+    ``root`` directory.
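+
+    For example (an illustrative sketch - the paths are hypothetical)::
+
+        allFiles('proj')
+        # ['proj/report.txt', 'proj/data/scan.nii.gz']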
+    """
+
+    files = []
+
+    for dirpath, _, filenames in os.walk(root):
+        filenames = [op.join(dirpath, f) for f in filenames]
+        files.extend(filenames)
+
+    return files
+
+
 def hasExt(path, allowedExts):
     """Convenience function which returns ``True`` if the given ``path``
     ends with any of the given ``allowedExts``, ``False`` otherwise.
diff --git a/fsl/utils/tempdir.py b/fsl/utils/tempdir.py
index b819058f804d22a225fc37f5ba15eb5418c55de8..23bf4d774aeac30b13bb34d5ca14d62ea518aced 100644
--- a/fsl/utils/tempdir.py
+++ b/fsl/utils/tempdir.py
@@ -21,7 +21,7 @@ import contextlib
 
 
 @contextlib.contextmanager
-def tempdir(root=None, changeto=True):
+def tempdir(root=None, changeto=True, override=None):
     """Returns a context manager which creates and returns a temporary
     directory, and then deletes it on exit.
 
@@ -32,17 +32,25 @@ def tempdir(root=None, changeto=True):
     :arg changeto: If ``True`` (the default), current working directory is set
                    to the new temporary directory before yielding, and restored
                    afterwards.
+
+    :arg override: Don't create a temporary directory, but use this one
+                   instead. This allows ``tempdir`` to be used as a context
+                   manager when a temporary directory already exists.
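+
+                   For example (a minimal sketch)::
+
+                       with tempdir() as td:
+                           # re-use td rather than creating
+                           # a new temporary directory
+                           with tempdir(override=td):
+                               ...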
     """
 
-    testdir = tempfile.mkdtemp(dir=root)
-    prevdir = os.getcwd()
-    try:
+    if override is None:
+        testdir = tempfile.mkdtemp(dir=root)
+    else:
+        testdir = override
+
+    prevdir = os.getcwd()
 
+    try:
         if changeto:
             os.chdir(testdir)
         yield testdir
 
     finally:
-        if changeto:
-            os.chdir(prevdir)
-        shutil.rmtree(testdir)
+        if changeto:
+            os.chdir(prevdir)
+        if override is None:
+            shutil.rmtree(testdir)
diff --git a/fsl/wrappers/__init__.py b/fsl/wrappers/__init__.py
index 8363c6d5a92460fd4222894dc45642dc892b2bf1..3a0297a3ef7af4574087ff3e4f3ace977d1b8d3f 100644
--- a/fsl/wrappers/__init__.py
+++ b/fsl/wrappers/__init__.py
@@ -81,6 +81,7 @@ from .bet          import (bet,             # noqa
                            robustfov)
 from .eddy         import (eddy_cuda,       # noqa
                            topup)
+from .fast         import (fast,)           # noqa
 from .flirt        import (flirt,           # noqa
                            invxfm,
                            applyxfm,
diff --git a/fsl/wrappers/bet.py b/fsl/wrappers/bet.py
index 969dcb3cdf7920f20d8dcb15fd05e6bd6adf03eb..09282f117b26374ab99a40420976fa0c1771dc8d 100644
--- a/fsl/wrappers/bet.py
+++ b/fsl/wrappers/bet.py
@@ -14,7 +14,7 @@ import fsl.utils.assertions as asrt
 from . import wrapperutils  as wutils
 
 
-@wutils.fileOrImage('input', 'output')
+@wutils.fileOrImage('input', outprefix='output')
 @wutils.fslwrapper
 def bet(input, output, **kwargs):
     """Wrapper for the ``bet`` command.
diff --git a/fsl/wrappers/fast.py b/fsl/wrappers/fast.py
new file mode 100644
index 0000000000000000000000000000000000000000..a944ea77ad31f2c8487c2e8e4aec2e3b8aea1b77
--- /dev/null
+++ b/fsl/wrappers/fast.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# fast.py - Wrapper for the FSL fast command.
+#
+# Author: Martin Craig <martin.craig@eng.ox.ac.uk>
+#         Paul McCarthy <pauldmccarthy@gmail.com>
+#
+"""This module provides the :func:`fast` function, a wrapper for the FSL
+`FAST <https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST>`_ command.
+"""
+
+
+import six
+
+import fsl.utils.assertions as asrt
+from . import wrapperutils  as wutils
+
+
+@wutils.fileOrImage('imgs', 'A', 's', 'manualseg', outprefix='out')
+@wutils.fileOrArray('a')
+@wutils.fslwrapper
+def fast(imgs, out='fast', **kwargs):
+    """Wrapper for the ``fast`` command.
+
+    :arg imgs:      Input image(s)
+    :arg out:       Output basename
+    :arg n_classes: Number of tissue classes (corresponds to the ``--class``
+                    command line option)
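+
+    For example, a call such as (the image name here is illustrative)::
+
+        fast('struct.nii.gz', out='seg', n_classes=3)
+
+    would generate a command line resembling
+    ``fast -v --out=seg --class=3 struct.nii.gz``.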
+    """
+
+    if isinstance(imgs, six.string_types):
+        imgs = [imgs]
+
+    asrt.assertIsNifti(*imgs)
+
+    argmap = {
+        'n_classes' : 'class',
+    }
+
+    cmd  = ['fast', '-v', '--out=%s' % out]
+    cmd += wutils.applyArgStyle('--=', argmap=argmap, **kwargs)
+    cmd += imgs
+
+    return cmd
diff --git a/fsl/wrappers/flirt.py b/fsl/wrappers/flirt.py
index d4ff35311b3ad4255d6b2d4be42390046f0d3a99..8b5d1d526dc6a893441204d788d58b7302d59260 100644
--- a/fsl/wrappers/flirt.py
+++ b/fsl/wrappers/flirt.py
@@ -95,15 +95,29 @@ def concatxfm(inmat1, inmat2, outmat):
     return cmd
 
 
-@wutils.fileOrImage('infile', 'out', 'reffile')
-@wutils.fileOrArray('init')
+@wutils.fileOrImage('infile', 'out', 'reffile', outprefix='out')
+@wutils.fileOrArray('init', outprefix='out')
 @wutils.fslwrapper
 def mcflirt(infile, **kwargs):
     """Wrapper for the ``mcflirt`` command."""
 
     asrt.assertIsNifti(infile)
 
+    argmap = {
+        'twod' : '2d',
+    }
+
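+    # boolean options which are only added
+    # to the command line (as flags) when
+    # they are given a value of True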
+    valmap = {
+        '2d'      : wutils.SHOW_IF_TRUE,
+        'gdt'     : wutils.SHOW_IF_TRUE,
+        'meanvol' : wutils.SHOW_IF_TRUE,
+        'stats'   : wutils.SHOW_IF_TRUE,
+        'mats'    : wutils.SHOW_IF_TRUE,
+        'plots'   : wutils.SHOW_IF_TRUE,
+        'report'  : wutils.SHOW_IF_TRUE,
+    }
+
     cmd  = ['mcflirt', '-in', infile]
-    cmd += wutils.applyArgStyle('-', **kwargs)
+    cmd += wutils.applyArgStyle('-', argmap=argmap, valmap=valmap, **kwargs)
 
     return cmd
diff --git a/fsl/wrappers/wrapperutils.py b/fsl/wrappers/wrapperutils.py
index cd59a55b82b9055d8489bcf1b0f253f27e5d5ef3..eb7bd529e85a918c55eb3df9a37ff33258297ee0 100644
--- a/fsl/wrappers/wrapperutils.py
+++ b/fsl/wrappers/wrapperutils.py
@@ -85,24 +85,36 @@ and returned::
 """
 
 
-import os.path as op
-import            os
-import            sys
-import            inspect
-import            tempfile
-import            warnings
-import            functools
-import            collections
+import itertools as it
+import os.path   as op
+import              os
+import              re
+import              sys
+import              glob
+import              shutil
+import              random
+import              string
+import              fnmatch
+import              inspect
+import              logging
+import              tempfile
+import              warnings
+import              functools
+import              collections
 
 import            six
 import nibabel as nib
 import numpy   as np
 
-import fsl.utils.tempdir as tempdir
 import fsl.utils.run     as run
+import fsl.utils.path    as fslpath
+import fsl.utils.tempdir as tempdir
 import fsl.data.image    as fslimage
 
 
+log = logging.getLogger(__name__)
+
+
 def _update_wrapper(wrapper, wrapped, *args, **kwargs):
     """Replacement for the built-in ``functools.update_wrapper``. This
     implementation ensures that the wrapper function has an attribute
@@ -483,7 +495,8 @@ class _FileOrThing(object):
 
 
     ``_FileOrThing`` decorators can be used with any other decorators
-    **as long as** they do not manipulate the return value.
+    **as long as** they do not manipulate the return value, and as long as
+    the ``_FileOrThing`` decorators are adjacent to each other.
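+
+    For example, the :func:`.fast` wrapper stacks the :func:`fileOrImage`
+    and :func:`fileOrArray` decorators adjacent to one another::
+
+        @fileOrImage('imgs', 'A', 's', 'manualseg', outprefix='out')
+        @fileOrArray('a')
+        @fslwrapper
+        def fast(imgs, out='fast', **kwargs):
+            ...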
     """
 
 
@@ -505,25 +518,42 @@ class _FileOrThing(object):
             return self.__output
 
 
-    def __init__(self, func, prepIn, prepOut, load, *things):
+    def __init__(self,
+                 func,
+                 prepIn,
+                 prepOut,
+                 load,
+                 removeExt,
+                 *args,
+                 **kwargs):
         """Initialise a ``_FileOrThing`` decorator.
 
-        :arg func:    The function to be decorated.
+        :arg func:      The function to be decorated.
+
+        :arg prepIn:    Function which returns a file name to be used in
+                        place of an input argument.
+
+        :arg prepOut:   Function which generates a file name to use for
+                        arguments that were set to :data:`LOAD`.
 
-        :arg prepIn:  Function which returns a file name to be used in
-                      place of an input argument.
+        :arg load:      Function which is called to load items for arguments
+                        that were set to :data:`LOAD`. Must accept a file path
+                        as its sole argument.
 
-        :arg prepOut: Function which generates a file name to use for
-                      arguments that were set to :data:`LOAD`.
+        :arg removeExt: Function which can remove a file extension from a file
+                        path.
 
-        :arg load:    Function which is called to load items for arguments
-                      that were set to :data:`LOAD`. Must accept a file path
-                      as its sole argument.
+        :arg outprefix: Must be passed as a keyword argument. The name of a
+                        positional or keyword argument to the function, which
+                        specifies an output file name prefix.  All other
+                        arguments with names that begin with this prefix may
+                        be interpreted as things to ``LOAD``.
+
+        All other positional arguments are interpreted as the names of the
+        arguments to the function which will be handled by this
+        ``_FileOrThing`` decorator. If not provided, *all* arguments passed to
+        the function will be handled.
 
-        :arg things:  Names of all arguments which will be handled by
-                      this ``_FileOrThing`` decorator. If not provided,
-                      *all* arguments passed to the function will be
-                      handled.
 
         The ``prepIn`` and ``prepOut`` functions must accept the following
         positional arguments:
@@ -535,11 +565,13 @@ class _FileOrThing(object):
 
           - The argument value that was passed in
         """
-        self.__func    = func
-        self.__prepIn  = prepIn
-        self.__prepOut = prepOut
-        self.__load    = load
-        self.__things  = things
+        self.__func      = func
+        self.__prepIn    = prepIn
+        self.__prepOut   = prepOut
+        self.__load      = load
+        self.__removeExt = removeExt
+        self.__things    = args
+        self.__outprefix = kwargs.get('outprefix', None)
 
 
     def __call__(self, *args, **kwargs):
@@ -553,18 +585,46 @@ class _FileOrThing(object):
         func     = self.__func
         argnames = namedPositionals(func, args)
 
+        # If this _FileOrThing is being called
+        # by another _FileOrThing don't create
+        # another working directory. We do this
+        # sneakily, by setting an attribute on
+        # the wrapped function which stores the
+        # current working directory.
+        wrapped     = _unwrap(func)
+        fot_workdir = getattr(wrapped, '_fot_workdir', None)
+        parent      = fot_workdir is None
+
         # Create a tempdir to store any temporary
         # input/output things, but don't change
         # into it, as file paths passed to the
         # function may be relative.
-        with tempdir.tempdir(changeto=False) as td:
+        with tempdir.tempdir(changeto=False, override=fot_workdir) as td:
+
+            log.debug('Redirecting LOADed outputs to %s', td)
 
             # Replace any things with file names.
             # Also get a list of LOAD outputs
-            args, kwargs, outfiles = self.__prepareArgs(
-                td, argnames, args, kwargs)
+            args = self.__prepareArgs(parent, td, argnames, args, kwargs)
+            args, kwargs, outprefix, outfiles, prefixes = args
+
+            # The prefix/patterns may be
+            # overridden by a parent FoT
+            outprefix = getattr(wrapped, '_fot_outprefix', outprefix)
+            prefixes  = getattr(wrapped, '_fot_prefixes',  prefixes)
+
+            # if there are any other FileOrThings
+            # in the decorator chain, get them to
+            # use our working directory, and
+            # prefixes, instead of creating their
+            # own.
+            if parent:
+                setattr(wrapped, '_fot_workdir',   td)
+                setattr(wrapped, '_fot_outprefix', outprefix)
+                setattr(wrapped, '_fot_prefixes',  prefixes)
 
             # Call the function
-            result = func(*args, **kwargs)
-
-            # make a _Reults object to store
@@ -578,16 +638,33 @@ class _FileOrThing(object):
-            # Load the LOADed outputs
-            for oname, ofile in outfiles.items():
-                if op.exists(ofile): result[oname] = self.__load(ofile)
+            try:
+                result = func(*args, **kwargs)
 
-            return result
+            finally:
+                # if we're the top-level FileOrThing
+                # decorator, remove the attributes we
+                # added above.
+                if parent:
+                    delattr(wrapped, '_fot_workdir')
+                    delattr(wrapped, '_fot_outprefix')
+                    delattr(wrapped, '_fot_prefixes')
+
+            return self.__generateResult(
+                td, result, outprefix, outfiles, prefixes)
 
 
-    def __prepareArgs(self, workdir, argnames, args, kwargs):
+    def __prepareArgs(self, parent, workdir, argnames, args, kwargs):
         """Prepares all input and output arguments to be passed to the
         decorated function. Any arguments with a value of :data:`LOAD` are
         passed to the ``prepOut`` function specified at :meth:`__init__`.
         All other arguments are passed through the ``prepIn`` function.
 
+        :arg parent:  ``True`` if this ``_FileOrThing`` is the first in a
+                      chain of ``_FileOrThing`` decorators.
+
         :arg workdir: Directory in which all temporary files should be stored.
 
         :arg args:    Positional arguments to be passed to the decorated
@@ -601,47 +678,229 @@ class _FileOrThing(object):
 
                         - An updated copy of ``kwargs``.
 
+                        - The output file prefix that was actually passed in
+                          (it is subsequently modified so that prefixed outputs
+                          are redirected to a temporary location). All prefixed
+                          outputs that are not ``LOAD``ed should be moved into
+                          this directory. ``None`` if there is no output
+                          prefix.
+
                         - A dictionary of ``{ name : filename }`` mappings,
                           for all arguments with a value of ``LOAD``.
+
+                        - A dictionary of ``{ filepat : replstr }`` mappings,
+                          for all output-prefix arguments with a value of
+                          ``LOAD``.
         """
 
-        outfiles = dict()
+        # These containers keep track
+        # of output files which are to
+        # be loaded into memory
+        outfiles      = dict()
+        prefixedFiles = dict()
 
         allargs  = {k : v for k, v in zip(argnames, args)}
         allargs.update(kwargs)
 
+        # Has an output prefix been specified?
+        prefix     = allargs.get(self.__outprefix, None)
+        realPrefix = None
+
+        # Prefixed outputs are only
+        # managed by the parent
+        # _FileOrThing in a chain of
+        # FoT decorators.
+        if not parent:
+            prefix = None
+
+        # If so, replace it with a new output
+        # prefix which will redirect all output
+        # to the temp dir.
+        #
+        # Importantly, here we assume that the
+        # underlying function (and hence the
+        # underlying command-line tool) will
+        # accept an output prefix which contains
+        # a directory path.
+        if prefix is not None:
+
+            # If prefix is set to LOAD,
+            # all generated output files
+            # should be loaded - we use a
+            # randomly generated prefix,
+            # and add it to prefixedFiles,
+            # so that every file which
+            # starts with it will be
+            # loaded.
+            if prefix is LOAD:
+                prefix                = random.sample(string.ascii_letters, 10)
+                prefix                = ''.join(prefix)
+                prefixedFiles[prefix] = self.__outprefix
+
+            realPrefix                = prefix
+            fakePrefix                = op.join(workdir, prefix)
+            allargs[self.__outprefix] = fakePrefix
+
+            log.debug('Replacing output prefix: %s -> %s',
+                      realPrefix, fakePrefix)
+
+            # If the prefix specifies a
+            # directory, make sure it
+            # exists (remember that we're
+            # in a temporary directory)
+            pdir = op.dirname(fakePrefix)
+            if pdir != '' and not op.exists(pdir):
+                os.makedirs(pdir)
+
         if len(self.__things) > 0: things = self.__things
         else:                      things = allargs.keys()
 
-        for name in things:
+        for name, val in list(allargs.items()):
 
-            val = allargs.get(name, None)
+            # don't process the
+            # outprefix argument
+            if name == self.__outprefix:
+                continue
+
+            # is this argument referring
+            # to a prefixed output?
+            isprefixed = (prefix is not None and
+                          name.startswith(prefix))
 
-            if val is None:
-                allargs.pop(name, None)
+            if not (isprefixed or name in things):
                 continue
 
-            if val is LOAD:
+            # Prefixed output files may only
+            # be given a value of LOAD
+            if isprefixed and val is not LOAD:
+                raise ValueError('Cannot specify name of prefixed file - the '
+                                 'name is defined by the output prefix: '
+                                 '{}'.format(name))
 
-                outfile = self.__prepOut(workdir, name, val)
+            if val is LOAD:
 
-                if outfile is not None:
-                    allargs[ name] = outfile
+                # this argument refers to an output
+                # that is generated from the output
+                # prefix argument, and doesn't map
+                # directly to an argument of the
+                # function. So we don't pass it
+                # through.
+                if isprefixed:
+                    prefixedFiles[name] = name
+                    allargs.pop(name)
+
+                # regular output-file argument
+                else:
+                    outfile = self.__prepOut(workdir, name, val)
                     outfiles[name] = outfile
-            else:
+                    allargs[ name] = outfile
 
-                infile = self.__prepIn(workdir, name, val)
+            # Assumed to be an input file
+            else:
+                # sequences may be
+                # accepted for inputs
+                if isinstance(val, (list, tuple)):
+                    infile = list(val)
+                    for i, v in enumerate(val):
+                        v = self.__prepIn(workdir, name, v)
+                        if v is not None:
+                            infile[i] = v
+
+                else:
+                    infile = self.__prepIn(workdir, name, val)
 
                 if infile is not None:
                     allargs[name] = infile
 
+        if realPrefix is not None and len(prefixedFiles) == 0:
+            allargs[self.__outprefix] = realPrefix
+
         args   = [allargs.pop(k) for k in argnames]
         kwargs = allargs
 
-        return args, kwargs, outfiles
+        return args, kwargs, realPrefix, outfiles, prefixedFiles
+
+
+    def __generateResult(
+            self, workdir, result, outprefix, outfiles, prefixes):
+        """Loads function outputs and returns a :class:`_Results` object.
+
+        Called by :meth:`__call__` after the decorated function has been
+        called. Figures out what files should be loaded, and loads them into
+        a ``_Results`` object.
+
+        :arg workdir:   Directory which contains the function outputs.
+        :arg result:    Function return value.
+        :arg outprefix: Original output prefix that was passed into the
+                        function (or ``None`` if one wasn't passed)
+        :arg outfiles:  Dictionary containing output files to be loaded (see
+                        :meth:`__prepareArgs`).
+        :arg prefixes:  Dictionary containing output-prefix patterns to be
+                        loaded (see :meth:`__prepareArgs`).
+
+        :returns:       A ``_Results`` object containing all loaded outputs.
+        """
+
+        # make a _Results object to store
+        # the output. If we are decorating
+        # another _FileOrThing, the
+        # results will get merged together
+        # into a single _Results dict.
+        if not isinstance(result, _FileOrThing._Results):
+            result = _FileOrThing._Results(result)
+
+        # Load the LOADed outputs
+        for oname, ofile in outfiles.items():
 
+            log.debug('Loading output %s: %s', oname, ofile)
 
-def fileOrImage(*imgargs):
+            if op.exists(ofile): oval = self.__load(ofile)
+            else:                oval = None
+
+            result[oname] = oval
+
+        # No output prefix - we're done
+        if outprefix is None or len(prefixes) == 0:
+            return result
+
+        # Load or move output-prefixed files.
+        # Find all files with a name that
+        # matches the prefix that was passed
+        # in (recursing into matching sub-
+        # directories too).
+        allPrefixed = glob.glob(op.join(workdir, '{}*'.format(outprefix)))
+        allPrefixed = [fslpath.allFiles(f) if op.isdir(f) else [f]
+                       for f in allPrefixed]
+
+        for prefixed in it.chain(*allPrefixed):
+            fullpath = prefixed
+            prefixed = op.relpath(prefixed, workdir)
+
+            for prefPat, prefName in prefixes.items():
+                if not fnmatch.fnmatch(prefixed, '{}*'.format(prefPat)):
+                    continue
+
+                log.debug('Loading prefixed output %s [%s]: %s',
+                          prefPat, prefName, prefixed)
+
+                # if the load function returns
+                # None, this file is probably
+                # not of the correct type.
+                fval = self.__load(fullpath)
+                if fval is not None:
+                    prefixed = self.__removeExt(prefixed)
+                    prefixed = re.sub('^' + prefPat, prefName, prefixed)
+                    result[prefixed] = fval
+                    break
+
+        return result
+
+
+def fileOrImage(*args, **kwargs):
     """Decorator which can be used to ensure that any NIfTI images are saved
     to file, and output images can be loaded and returned as ``nibabel``
     image objects or :class:`.Image` objects.
@@ -682,6 +941,10 @@ def fileOrImage(*imgargs):
         return op.join(workdir, '{}.nii.gz'.format(name))
 
     def load(path):
+
+        if not fslimage.looksLikeImage(path):
+            return None
+
         # create an independent in-memory
         # copy of the image file
         img = nib.load(path)
@@ -701,7 +964,13 @@ def fileOrImage(*imgargs):
             raise RuntimeError('Cannot handle type: {}'.format(intypes))
 
     def decorator(func):
-        fot = _FileOrThing(func, prepIn, prepOut, load, *imgargs)
+        fot = _FileOrThing(func,
+                           prepIn,
+                           prepOut,
+                           load,
+                           fslimage.removeExt,
+                           *args,
+                           **kwargs)
 
         def wrapper(*args, **kwargs):
             result = fot(*args, **kwargs)
@@ -713,7 +982,7 @@ def fileOrImage(*imgargs):
     return decorator
 
 
-def fileOrArray(*arrargs):
+def fileOrArray(*args, **kwargs):
     """Decorator which can be used to ensure that any Numpy arrays are saved
     to text files, and output files can be loaded and returned as Numpy arrays.
     """
@@ -732,10 +1001,18 @@ def fileOrArray(*arrargs):
     def prepOut(workdir, name, val):
         return op.join(workdir, '{}.txt'.format(name))
 
-    load = np.loadtxt
+    def load(path):
+        try:              return np.loadtxt(path)
+        except Exception: return None
 
     def decorator(func):
-        fot = _FileOrThing(func, prepIn, prepOut, load, *arrargs)
+        fot = _FileOrThing(func,
+                           prepIn,
+                           prepOut,
+                           load,
+                           fslpath.removeExt,
+                           *args,
+                           **kwargs)
 
         def wrapper(*args, **kwargs):
             return fot(*args, **kwargs)
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 50188f4deec66b2b89e719e2e5470856eafe5ddd..4b113427907f852c9e4b7c7813eb528e97cbbf43 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -4,4 +4,3 @@ mock==2.*
 coverage==4.*
 pytest==3.*
 pytest-cov==2.*
-pytest-runner>=2.*,<=4.*
diff --git a/setup.py b/setup.py
index 228878ef4cdb47ddfd6791924fb0e6aaf56815fd..5f2eebdfd9e315dbc7db3681743bf2d7c4f5d1b8 100644
--- a/setup.py
+++ b/setup.py
@@ -124,11 +124,12 @@ setup(
 
     entry_points={
         'console_scripts' : [
-            'immv       = fsl.scripts.immv:main',
-            'imcp       = fsl.scripts.imcp:main',
-            'imglob     = fsl.scripts.imglob:main',
-            'atlasq     = fsl.scripts.atlasq:main',
-            'atlasquery = fsl.scripts.atlasq:atlasquery_emulation',
+            'immv          = fsl.scripts.immv:main',
+            'imcp          = fsl.scripts.imcp:main',
+            'imglob        = fsl.scripts.imglob:main',
+            'atlasq        = fsl.scripts.atlasq:main',
+            'atlasquery    = fsl.scripts.atlasq:atlasquery_emulation',
+            'extract_noise = fsl.scripts.extract_noise:main',
         ]
     }
 )
diff --git a/tests/__init__.py b/tests/__init__.py
index d8647089277169add9aead66a38bd911615448e3..eb16fa95ab31bdcce6494524b28a481493cf7cb1 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -11,8 +11,9 @@ import              os
 import              sys
 import              glob
 import              shutil
-import              tempfile
+import              fnmatch
 import              logging
+import              tempfile
 import              contextlib
 import itertools as it
 import os.path   as op
@@ -187,16 +188,26 @@ def make_dummy_image_file(path):
         make_dummy_file(path)
 
 
-def cleardir(dir):
+def cleardir(dir, pat=None):
     """Deletes everything in the given directory, but not the directory
     itself.
     """
     for f in os.listdir(dir):
+
+        if pat is not None and not fnmatch.fnmatch(f, pat):
+            continue
+
         f = op.join(dir, f)
+
         if   op.isfile(f): os.remove(f)
         elif op.isdir(f):  shutil.rmtree(f)
 
 
+def checkdir(dir, *expfiles):
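+    """Asserts that every file in ``expfiles`` exists within ``dir``."""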
+    for f in expfiles:
+        assert op.exists(op.join(dir, f))
+
+
 def random_voxels(shape, nvoxels=1):
     randVoxels = np.vstack(
         [np.random.randint(0, s, nvoxels) for s in shape[:3]]).T
diff --git a/tests/test_extract_noise.py b/tests/test_extract_noise.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c1e4b8ed7b588b6e68351c3504710293deffbdc
--- /dev/null
+++ b/tests/test_extract_noise.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+#
+# test_extract_noise.py -
+#
+# Author: Paul McCarthy <pauldmccarthy@gmail.com>
+#
+
+import sys
+
+import numpy as np
+
+import pytest
+
+import fsl.utils.tempdir         as tempdir
+import fsl.scripts.extract_noise as extn
+
+
+def test_genComponentIndexList():
+
+    with tempdir.tempdir():
+
+        # sequence of 1-indexed integers/file paths
+        icomps  = [1, 5, 28, 12, 42, 54]
+        fcomps1 = [1, 4, 6, 3, 7]
+        fcomps2 = [12, 42, 31, 1, 4, 8]
+
+        with open('comps1.txt', 'wt') as f:
+            f.write(','.join([str(l) for l in fcomps1]))
+        with open('comps2.txt', 'wt') as f:
+            f.write(','.join([str(l) for l in fcomps2]))
+
+        ncomps   = 60
+        comps    = icomps + ['comps1.txt', 'comps2.txt']
+        expcomps = list(sorted(set(icomps + fcomps1 + fcomps2)))
+        expcomps = [c - 1 for c in expcomps]
+
+        assert extn.genComponentIndexList(comps, ncomps) == expcomps
+
+        with pytest.raises(ValueError):
+            extn.genComponentIndexList(comps + [-1], 60)
+        with pytest.raises(ValueError):
+            extn.genComponentIndexList(comps, 40)
+
+
+def test_loadConfoundFiles():
+    with tempdir.tempdir():
+
+        npts  = 50
+        confs = [
+            np.random.randint(1, 100, (50, 10)),
+            np.random.randint(1, 100, (50, 1)),
+            np.random.randint(1, 100, (50, 5))]
+
+        badconfs = [
+            np.random.randint(1, 100, (40, 10)),
+            np.random.randint(1, 100, (60, 10))]
+
+        expected            = np.empty((50, 16), dtype=np.float64)
+        expected[:, :]      = np.nan
+        expected[:, :10]    = confs[0]
+        expected[:,  10:11] = confs[1]
+        expected[:,  11:16] = confs[2]
+
+        conffiles = []
+        for i, c in enumerate(confs):
+            fname = 'conf{}.txt'.format(i)
+            conffiles.append(fname)
+            np.savetxt(fname, c)
+
+        result = extn.loadConfoundFiles(conffiles, npts)
+        amask  = ~np.isnan(expected)
+
+        assert np.all(~np.isnan(result) == amask)
+        assert np.all(result[amask]     == expected[amask])
+
+        badconfs = [
+            np.random.randint(1, 100, (40, 10)),
+            np.random.randint(1, 100, (60, 10))]
+        conffiles = []
+        for i, c in enumerate(badconfs):
+            fname = 'conf{}.txt'.format(i)
+            conffiles.append(fname)
+            np.savetxt(fname, c)
+
+        with pytest.raises(ValueError):
+            extn.loadConfoundFiles(conffiles, npts)
+
+
+def test_extract_noise():
+
+    with tempdir.tempdir() as td:
+
+        # (npts, ncomps)
+        melmix = np.random.randint(1, 100, (100, 20))
+        np.savetxt('melodic_mix', melmix)
+
+        sys.argv = ['extract_noise', td] + '-o out.txt 1 2 3'.split()
+        extn.main()
+        assert np.all(np.loadtxt('out.txt') == melmix[:, :3])
+
+        with open('labels.txt', 'wt') as f:
+            f.write('4, 5, 6, 7')
+
+        extn.main([td] + '-o out.txt -ow 1 2 3 labels.txt'.split())
+        assert np.all(np.loadtxt('out.txt') == melmix[:, :7])
+
+        conf1 = np.random.randint(1, 100, (100, 1))
+        conf2 = np.random.randint(1, 100, (100, 5))
+        np.savetxt('conf1.txt', conf1)
+        np.savetxt('conf2.txt', conf2)
+
+        exp = np.hstack((melmix[:, :3], conf1, conf2))
+        extn.main([td] + '-o out.txt -c conf1.txt -c conf2.txt -ow 1 2 3'.split())
+        assert np.all(np.loadtxt('out.txt') == exp)
+
+
+def test_extract_noise_usage():
+
+    with pytest.raises(SystemExit) as e:
+        extn.main([])
+    assert e.value.code == 0
+
+def test_extract_noise_badargs():
+
+    with pytest.raises(SystemExit) as e:
+        extn.main(['non-existent.ica', '1', '2', '3'])
+    assert e.value.code != 0
+
+    with tempdir.tempdir() as td:
+        with pytest.raises(SystemExit) as e:
+            extn.main([td, 'non-existent.txt', '1', '2', '3'])
+        assert e.value.code != 0
+
+        with open('outfile.txt', 'wt') as f:
+            f.write('a')
+
+        # overwrite not specified
+        with pytest.raises(SystemExit) as e:
+            extn.main([td, '-o', 'outfile.txt', '1', '2', '3'])
+        assert e.value.code != 0
+
+        with pytest.raises(SystemExit) as e:
+            extn.main([td, '-c', 'non-existent.txt', '1', '2', '3'])
+        assert e.value.code != 0
+
+
+        # bad data
+        melmix = np.random.randint(1, 100, (100, 5))
+        np.savetxt('melodic_mix', melmix)
+
+        with open('labels.txt', 'wt') as f:
+            f.write('-1, 0, 1, 2')
+
+        with pytest.raises(SystemExit) as e:
+            extn.main([td, 'labels.txt', '1', '2', '3'])
+        assert e.value.code != 0
diff --git a/tests/test_fixlabels.py b/tests/test_fixlabels.py
index 04d1c7edc5d02c6930ca23bbb1237ee8126ad898..a6049d57b2ee45cc42e6b75f0d3b418d37b6121d 100644
--- a/tests/test_fixlabels.py
+++ b/tests/test_fixlabels.py
@@ -35,7 +35,8 @@ filtered_func_data.ica
  ['Unclassified Noise'],
  ['Unclassified Noise'],
  ['Unclassified Noise'],
- ['Signal']]))
+ ['Signal']],
+[2, 5, 6, 7]))
 
 
 goodfiles.append(("""
@@ -92,7 +93,8 @@ REST.ica/filtered_func_data.ica
  ['Unclassified noise'],
  ['Unclassified noise'],
  ['Unclassified noise'],
- ['Unknown']]))
+ ['Unknown']],
+[2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 21, 22, 23, 24]))
 
 goodfiles.append(("""
 [2, 5, 6, 7]
@@ -104,7 +106,8 @@ None,
  ['Signal'],
  ['Unclassified noise'],
  ['Unclassified noise'],
- ['Unclassified noise']]))
+ ['Unclassified noise']],
+[2, 5, 6, 7]))
 
 goodfiles.append(("""
 2, 5, 6, 7
@@ -116,7 +119,8 @@ None,
  ['Unknown'],
  ['Movement'],
  ['Movement'],
- ['Movement']]))
+ ['Movement']],
+[2, 5, 6, 7]))
 
 
 goodfiles.append(("""
@@ -127,11 +131,12 @@ path/to/analysis.ica
 """,
 'path/to/analysis.ica',
 [['Unclassified noise'],
- ['Signal', 'Blob']]))
+ ['Signal', 'Blob']],
+[1]))
 
 def test_loadLabelFile_good():
 
-    for filecontents, expMelDir, expLabels in goodfiles:
+    for filecontents, expMelDir, expLabels, expIdxs in goodfiles:
 
         with tests.testdir() as testdir:
 
@@ -143,9 +148,15 @@ def test_loadLabelFile_good():
                 f.write(filecontents.strip())
 
             resMelDir, resLabels = fixlabels.loadLabelFile(fname)
+            assert resMelDir == expMelDir
+            assert len(resLabels) == len(expLabels)
+            for exp, res in zip(expLabels, resLabels):
+                assert exp == res
 
+            resMelDir, resLabels, resIdxs = fixlabels.loadLabelFile(
+                fname, returnIndices=True)
             assert resMelDir == expMelDir
-            
+            assert resIdxs   == expIdxs
             assert len(resLabels) == len(expLabels)
             for exp, res in zip(expLabels, resLabels):
                 assert exp == res
@@ -309,7 +320,7 @@ def test_loadLabelFile_customLabels():
             if i in included:
                 assert ilbls[0] == incLabel
             else:
-                assert ilbls[0] == excLabel 
+                assert ilbls[0] == excLabel
 
 
 def test_saveLabelFile():
@@ -328,7 +339,7 @@ def test_saveLabelFile():
     4, Label1, True
     5, Unknown, False
     """).strip()
-    
+
     with tests.testdir() as testdir:
         fname = op.join(testdir, 'fname.txt')
 
@@ -344,12 +355,12 @@ def test_saveLabelFile():
         exp = '{}\n{}'.format(dirname, expected)
         with open(fname, 'rt') as f:
             assert f.read().strip() == exp
-            
+
         # dirname=None, listBad=True
         fixlabels.saveLabelFile(labels, fname)
         exp = '.\n{}\n[1, 3, 4]'.format(expected)
-        with open(fname, 'rt') as f: 
-            assert f.read().strip() == exp 
+        with open(fname, 'rt') as f:
+            assert f.read().strip() == exp
 
         # Custom signal labels
         sigLabels = ['Label1']
@@ -364,5 +375,5 @@ def test_saveLabelFile():
         """).strip()
 
         fixlabels.saveLabelFile(labels, fname, signalLabels=sigLabels)
-        with open(fname, 'rt') as f: 
-            assert f.read().strip() == exp 
+        with open(fname, 'rt') as f:
+            assert f.read().strip() == exp
diff --git a/tests/test_freesurfer.py b/tests/test_freesurfer.py
index ff7791ae2265e6c0247e2fb4a66aa49a91a15c61..b0f9bb638bc25758fbd46d6e6683e3a4535e876c 100644
--- a/tests/test_freesurfer.py
+++ b/tests/test_freesurfer.py
@@ -204,6 +204,8 @@ def test_loadVertexData_annot():
         loaded = mesh.loadVertexData(vdfile)
         ergbal, enames = mesh.getVertexDataColourTable(vdfile)
 
+        enames = [n.decode() for n in enames]
+
         assert np.all(np.isclose(loaded, labels.reshape(-1, 1)))
         assert list(enames) == list(names)
         assert np.all(np.isclose(ergbal[:, :4], rgba))
diff --git a/tests/test_fsl_utils_path.py b/tests/test_fsl_utils_path.py
index 9a567396f6ea119cd6488c058fb05fe6e4a231ca..6bb54fcd8b55ab2b78225b5d2d2d26a9e1e8ac2a 100644
--- a/tests/test_fsl_utils_path.py
+++ b/tests/test_fsl_utils_path.py
@@ -110,6 +110,23 @@ def test_shallowest():
         assert fslpath.shallowest(path, suffixes) == output
 
 
+def test_allFiles():
+    create = [
+        'a/1',
+        'a/2',
+        'a/b/1',
+        'a/b/2',
+        'a/b/c/1',
+        'a/b/d/1',
+    ]
+
+    with testdir(create) as td:
+        assert (sorted(fslpath.allFiles('.')) ==
+                sorted([op.join('.', c) for c in create]))
+        assert (sorted(fslpath.allFiles(td))  ==
+                sorted([op.join(td,  c) for c in create]))
+
+
 def test_hasExt():
 
     tests = [
diff --git a/tests/test_image.py b/tests/test_image.py
index d7365f8d49b8ba4ab8d50f81b0b913b7f3e10cb1..34380c802e8d9a476a1b004d660b8a28e06acac7 100644
--- a/tests/test_image.py
+++ b/tests/test_image.py
@@ -30,6 +30,18 @@ from fsl.utils.tempdir import tempdir
 from . import make_random_image
 from . import make_dummy_file
 
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+
+try:
+    import indexed_gzip as igzip
+except ImportError:
+    igzip           = mock.MagicMock()
+    igzip.ZranError = mock.MagicMock()
+
 
 def make_image(filename=None,
                imgtype=1,
@@ -116,8 +128,8 @@ def test_load():
     shouldRaise = [('notexist',        fslpath.PathError),
                    ('notexist.nii.gz', fslpath.PathError),
                    ('ambiguous',       fslpath.PathError),
-                   ('notnifti',        ImageFileError),
-                   ('notnifti.nii.gz', ImageFileError)]
+                   ('notnifti',        (ImageFileError, igzip.ZranError)),
+                   ('notnifti.nii.gz', (ImageFileError, igzip.ZranError))]
 
 
     with tempdir() as testdir:
diff --git a/tests/test_tempdir.py b/tests/test_tempdir.py
index 0495deb03ff2964162bd8620f506f447a2ab0913..920997965cdc3c8e6cd1aee2caf2e0cd7fff94c7 100644
--- a/tests/test_tempdir.py
+++ b/tests/test_tempdir.py
@@ -51,3 +51,15 @@ def test_tempdir_changeto():
         assert op.realpath(os.getcwd()) == cwd
 
     assert op.realpath(os.getcwd()) == cwd
+
+
+def test_tempdir_override():
+    with tempdir.tempdir() as parent:
+
+        # tempdir should not create/change to
+        # a new temp directory, but should
+        # stay in the override directory
+        with tempdir.tempdir(override=parent):
+            assert op.realpath(os.getcwd()) == op.realpath(parent)
+        # override should not be deleted
+        assert op.exists(parent)
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index 1e366a166ebbbe62b0ffef025fac13c4e9bfc6d3..af2a95fb6df0c7254ebde60625ccc2264ebdaa2e 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -15,14 +15,23 @@ import fsl.utils.run                      as run
 from . import mockFSLDIR
 
 
-def checkResult(cmd, base, args):
+def checkResult(cmd, base, args, stripdir=None):
     """We can't control the order in which command line args are generated,
     so we need to test all possible orderings.
 
-    :arg cmd:  Generated command
-    :arg base: Beginning of expected command
-    :arg args: Sequence of expected arguments
+    :arg cmd:      Generated command
+    :arg base:     Beginning of expected command
+    :arg args:     Sequence of expected arguments
+    :arg stripdir: Sequence of indices indicating arguments
+                   for which any leading directory should be ignored.
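+
+                   For example, ``stripdir=[2]`` causes the third token of
+                   the generated command to be compared by base name only.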
     """
+
+    if stripdir is not None:
+        cmd = list(cmd.split())
+        for si in stripdir:
+            cmd[si] = op.basename(cmd[si])
+        cmd = ' '.join(cmd)
+
     permutations = it.permutations(args, len(args))
     possible     = [' '.join([base] + list(p))  for p in permutations]
 
@@ -34,7 +43,7 @@ def test_bet():
         bet      = op.join(fsldir, 'bin', 'bet')
         result   = fw.bet('input', 'output', mask=True, c=(10, 20, 30))
         expected = (bet + ' input output', ('-m', '-c 10 20 30'))
-        assert checkResult(result.output[0], *expected)
+        assert checkResult(result.output[0], *expected, stripdir=[2])
 
 
 def test_robustfov():
@@ -263,3 +272,18 @@ def test_fslmaths():
         assert result.output[0] == expected
 
         # TODO test LOAD output
+
+def test_fast():
+    with asrt.disabled(), run.dryrun(), mockFSLDIR() as fsldir:
+
+        cmd = op.join(fsldir, 'bin', 'fast')
+
+        result   = fw.fast('input', 'myseg', n_classes=3)
+        expected = [cmd, '-v', '--out=myseg', '--class=3', 'input']
+
+        assert result.output[0] == ' '.join(expected)
+
+        result   = fw.fast(('in1', 'in2', 'in3'), 'myseg', n_classes=3)
+        expected = [cmd, '-v', '--out=myseg', '--class=3', 'in1', 'in2', 'in3']
+
+        assert result.output[0] == ' '.join(expected)
diff --git a/tests/test_wrapperutils.py b/tests/test_wrapperutils.py
index b5c393e78d92c1360576a1dbe309cb5587898ce7..8173fe1045f5f5aa06a3275e38391064e940c9bb 100644
--- a/tests/test_wrapperutils.py
+++ b/tests/test_wrapperutils.py
@@ -13,6 +13,7 @@ import            textwrap
 try: from unittest import mock
 except ImportError: import mock
 
+import six
 import pytest
 
 import numpy as np
@@ -25,7 +26,7 @@ import fsl.data.image            as fslimage
 import fsl.wrappers.wrapperutils as wutils
 
 
-from . import mockFSLDIR
+from . import mockFSLDIR, cleardir, checkdir
 from .test_run import mock_submit
 
 
@@ -309,25 +310,278 @@ def test_fileOrImage():
         assert np.all(result.get_data()[:] == expected)
 
 
+def test_fileOrThing_sequence():
+
+    @wutils.fileOrArray('arrs', 'out')
+    def func(arrs, out):
+
+        if isinstance(arrs, six.string_types):
+            arrs = [arrs]
+
+        arrs = [np.loadtxt(a) for a in arrs]
+        res  = np.sum(arrs, axis=0)
+
+        np.savetxt(out, res)
+
+    inputs  = [np.random.randint(1, 10, (3, 3)) for i in range(4)]
+    infiles = ['input{}.txt'.format(i) for i in range(len(inputs))]
+    exp     = np.sum(inputs, axis=0)
+
+    with tempdir.tempdir():
+
+        for ifile, idata in zip(infiles, inputs):
+            np.savetxt(ifile, idata)
+
+        func(inputs, 'result.txt')
+        assert np.all(np.loadtxt('result.txt') == exp)
+
+        assert np.all(func(inputs, wutils.LOAD)['out'] == exp)
+
+        func(inputs[0], 'result.txt')
+        assert np.all(np.loadtxt('result.txt') == inputs[0])
+
+        assert np.all(func(inputs[0], wutils.LOAD)['out'] == inputs[0])
+
+        func(infiles, 'result.txt')
+        assert np.all(np.loadtxt('result.txt') == exp)
+
+        assert np.all(func(infiles, wutils.LOAD)['out'] == exp)
+
+        func(infiles[0], 'result.txt')
+        assert np.all(np.loadtxt('result.txt') == inputs[0])
+
+        assert np.all(func(infiles[0], wutils.LOAD)['out'] == inputs[0])
+
+
+def test_fileOrThing_outprefix():
+
+    @wutils.fileOrImage('img', outprefix='output_base')
+    def basefunc(img, output_base):
+        img = nib.load(img).get_data()
+
+        out1 = nib.nifti1.Nifti1Image(img * 5,  np.eye(4))
+        out2 = nib.nifti1.Nifti1Image(img * 10, np.eye(4))
+
+        nib.save(out1, '{}_times5.nii.gz' .format(output_base))
+        nib.save(out2, '{}_times10.nii.gz'.format(output_base))
+
+
+    with tempdir.tempdir() as td:
+        img  = nib.nifti1.Nifti1Image(np.array([[1, 2], [3, 4]]), np.eye(4))
+        exp1 = img.get_data() * 5
+        exp2 = img.get_data() * 10
+        nib.save(img, 'img.nii')
+
+        basefunc('img.nii', 'myout')
+        assert np.all(nib.load('myout_times5.nii.gz') .get_data() == exp1)
+        assert np.all(nib.load('myout_times10.nii.gz').get_data() == exp2)
+        cleardir(td, 'myout*')
+
+        basefunc(img, 'myout')
+        assert np.all(nib.load('myout_times5.nii.gz') .get_data() == exp1)
+        assert np.all(nib.load('myout_times10.nii.gz').get_data() == exp2)
+        cleardir(td, 'myout*')
+
+        res = basefunc(img, 'myout', myout_times5=wutils.LOAD)
+        assert np.all(res['myout_times5'].get_data() == exp1)
+        cleardir(td, 'myout*')
+
+        res = basefunc(img, 'myout', myout_times10=wutils.LOAD)
+        assert np.all(res['myout_times10'].get_data() == exp2)
+        cleardir(td, 'myout*')
+
+        res = basefunc(img, 'myout', myout=wutils.LOAD)
+        assert np.all(res['myout_times5'] .get_data() == exp1)
+        assert np.all(res['myout_times10'].get_data() == exp2)
+        cleardir(td, 'myout*')
+
+
+def test_fileOrThing_outprefix_differentTypes():
+
+    @wutils.fileOrImage('img', outprefix='outpref')
+    def func(img, outpref):
+
+        img  = nib.load(img)
+        img  = nib.nifti1.Nifti1Image(img.get_data() * 2, np.eye(4))
+        text = '1234567890'
+
+        nib.save(img, '{}_image.nii.gz' .format(outpref))
+
+        with open('{}_text.txt'.format(outpref), 'wt') as f:
+            f.write(text)
+
+    with tempdir.tempdir() as td:
+        img  = nib.nifti1.Nifti1Image(np.array([[1, 2], [3, 4]]), np.eye(4))
+        expi = img.get_data() * 2
+        expt = '1234567890'
+
+        func(img, 'myout')
+        assert np.all(nib.load('myout_image.nii.gz') .get_data() == expi)
+        with open('myout_text.txt', 'rt') as f:
+            assert f.read().strip() == expt
+        cleardir(td, 'myout*')
+
+        res = func(img, 'myout', myout_image=wutils.LOAD)
+        assert list(res.keys()) == ['myout_image']
+        assert np.all(res['myout_image'].get_data() == expi)
+        cleardir(td, 'myout*')
+
+        res = func(img, 'myout', myout=wutils.LOAD)
+        assert list(res.keys()) == ['myout_image']
+        assert np.all(res['myout_image'].get_data() == expi)
+        cleardir(td, 'myout*')
+
+        res = func(img, 'myout', myout_text=wutils.LOAD)
+        assert list(res.keys()) == []
+        cleardir(td, 'myout*')
+
+
+def test_fileOrThing_outprefix_directory():
+
+    import logging
+    logging.basicConfig()
+    logging.getLogger('fsl.wrappers').setLevel(logging.DEBUG)
+
+    @wutils.fileOrImage('img', outprefix='outpref')
+    def func(img, outpref):
+        img  = nib.load(img)
+        img2 = nib.nifti1.Nifti1Image(img.get_data() * 2, np.eye(4))
+        img4 = nib.nifti1.Nifti1Image(img.get_data() * 4, np.eye(4))
+
+        outdir = op.abspath('{}_imgs'.format(outpref))
+
+        os.mkdir(outdir)
+
+        nib.save(img2, op.join(outdir, 'img2.nii.gz'))
+        nib.save(img4, op.join(outdir, 'img4.nii.gz'))
+
+    with tempdir.tempdir() as td:
+        img  = nib.nifti1.Nifti1Image(np.array([[1, 2], [3, 4]]), np.eye(4))
+        exp2 = img.get_data() * 2
+        exp4 = img.get_data() * 4
+
+        res = func(img, 'myout')
+        assert len(res) == 0
+        checkdir(td,
+                 op.join('myout_imgs', 'img2.nii.gz'),
+                 op.join('myout_imgs', 'img4.nii.gz'))
+        cleardir(td, 'myout*')
+
+        res = func(img, 'myout', myout_imgs=wutils.LOAD)
+        assert len(res) == 2
+        assert np.all(res['myout_imgs/img2'].get_data() == exp2)
+        assert np.all(res['myout_imgs/img4'].get_data() == exp4)
+
+        res = func(img, 'myout', **{'myout_imgs/img2' : wutils.LOAD})
+        assert len(res) == 1
+        assert np.all(res['myout_imgs/img2'].get_data() == exp2)
+
+        res = func(img, 'myout', **{'myout_imgs/img' : wutils.LOAD})
+        assert len(res) == 2
+        assert np.all(res['myout_imgs/img2'].get_data() == exp2)
+        assert np.all(res['myout_imgs/img4'].get_data() == exp4)
+
+        os.mkdir('foo')
+        res = func(img, 'foo/myout')
+        assert len(res) == 0
+        checkdir(td,
+                 op.join('foo', 'myout_imgs', 'img2.nii.gz'),
+                 op.join('foo', 'myout_imgs', 'img4.nii.gz'))
+        cleardir(td, 'foo')
+
+        os.mkdir('foo')
+        res = func(img, 'foo/myout', **{'foo/myout' : wutils.LOAD})
+        assert len(res) == 2
+        assert np.all(res['foo/myout_imgs/img2'].get_data() == exp2)
+        assert np.all(res['foo/myout_imgs/img4'].get_data() == exp4)
+
+
 def test_chained_fileOrImageAndArray():
-    @wutils.fileOrImage('image')
-    @wutils.fileOrArray('array')
-    def func(image, array):
-        nib.load(image)
-        np.loadtxt(array)
+    @wutils.fileOrImage('image', 'outimage')
+    @wutils.fileOrArray('array', 'outarray')
+    def func(image, array, outimage, outarray):
+        image = nib.load(image)
+        array = np.loadtxt(array)
+
+        outimg = nib.nifti1.Nifti1Image(image.get_data() * 2, np.eye(4))
+
+        np.savetxt(outarray, array * 2)
+        outimg.to_filename(outimage)
 
     image = nib.nifti1.Nifti1Image(np.array([[1,  2], [ 3,  4]]), np.eye(4))
     array = np.array([[5, 6, 7, 8]])
 
+    expimg = nib.nifti1.Nifti1Image(image.get_data() * 2, np.eye(4))
+    exparr = array * 2
+
     with tempdir.tempdir():
 
         nib.save(image, 'image.nii')
         np.savetxt('array.txt', array)
 
-        func('image.nii', 'array.txt')
-        func('image.nii',  array)
-        func( image,      'array.txt')
-        func( image,       array)
+        func('image.nii', 'array.txt', 'outimg.nii', 'outarr.txt')
+        assert np.all(nib.load('outimg.nii').get_data() == expimg.get_data())
+        assert np.all(np.loadtxt('outarr.txt') == exparr)
+
+        func('image.nii', array, 'outimg.nii', 'outarr.txt')
+        assert np.all(nib.load('outimg.nii').get_data() == expimg.get_data())
+        assert np.all(np.loadtxt('outarr.txt') == exparr)
+
+        func( image, 'array.txt', 'outimg.nii', 'outarr.txt')
+        assert np.all(nib.load('outimg.nii').get_data() == expimg.get_data())
+        assert np.all(np.loadtxt('outarr.txt') == exparr)
+
+        func( image, array, 'outimg.nii', 'outarr.txt')
+        assert np.all(nib.load('outimg.nii').get_data() == expimg.get_data())
+        assert np.all(np.loadtxt('outarr.txt') == exparr)
+
+        res = func(image, array, wutils.LOAD, 'outarr.txt')
+        assert np.all(res['outimage'].get_data() == expimg.get_data())
+        assert np.all(np.loadtxt('outarr.txt') == exparr)
+
+        res = func(image, array, 'outimg.nii', wutils.LOAD)
+        assert np.all(nib.load('outimg.nii').get_data() == expimg.get_data())
+        assert np.all(res['outarray'] == exparr)
+
+        res = func(image, array, wutils.LOAD, wutils.LOAD)
+        assert np.all(res['outimage'].get_data() == expimg.get_data())
+        assert np.all(res['outarray'] == exparr)
+
+
+def test_fileOrThing_chained_outprefix():
+
+    import logging
+    logging.basicConfig()
+    logging.getLogger('fsl.wrappers').setLevel(logging.DEBUG)
+
+    @wutils.fileOrImage('image', 'outimage', outprefix='out')
+    @wutils.fileOrArray('array', 'outarray', outprefix='out')
+    def func(image, array, out, **kwa):
+
+        image = nib.load(image)
+        array = np.loadtxt(array)
+
+        outimg = nib.nifti1.Nifti1Image(image.get_data() * 2, np.eye(4))
+        outarr = array * 2
+
+        np.savetxt('{}_array.txt'.format(out), outarr)
+        outimg.to_filename('{}_image.nii'.format(out))
+
+    image = nib.nifti1.Nifti1Image(np.array([[1,  2], [ 3,  4]]), np.eye(4))
+    array = np.array([[5, 6, 7, 8]])
+
+    expimg = nib.nifti1.Nifti1Image(image.get_data() * 2, np.eye(4))
+    exparr = array * 2
+
+    with tempdir.tempdir():
+
+        func(image, array, 'myout')
+        assert np.all(nib.load('myout_image.nii').get_data() == expimg.get_data())
+        assert np.all(np.loadtxt('myout_array.txt') == exparr)
+
+        res = func(image, array, wutils.LOAD)
+        assert np.all(res['out_image'].get_data() == expimg.get_data())
+        assert np.all(res['out_array'] == exparr)
 
 
 def test_cmdwrapper():