Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing with 1721 additions and 107 deletions
#!/usr/bin/env python
#
# imtest.py - Test whether an image file exists or not.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
"""The ``imtest`` script can be used to test whether an image file exists or
not, without having to know the file suffix (.nii, .nii.gz, etc).
"""

import os.path as op
import sys

import fsl.utils.path as fslpath

# The lists below are defined in the
# fsl.data.image class, but are duplicated
# here for performance (to avoid import of
# nibabel/numpy/etc).
exts = ['.nii.gz', '.nii',
        '.img',    '.hdr',
        '.img.gz', '.hdr.gz',
        '.mnc',    '.mnc.gz']
"""List of file extensions that are supported by ``imtest``.
"""

groups = [('.hdr', '.img'), ('.hdr.gz', '.img.gz')]
"""List of known image file groups (image/header file pairs). """


def imtest(path):
    """Returns ``True`` if the given image path exists, ``False`` otherwise. """
    path = fslpath.removeExt(path, exts)
    path = op.realpath(path)

    # getFileGroup will raise an error
    # if the image (including all
    # components - i.e. header and
    # image) does not exist
    try:
        fslpath.getFileGroup(path,
                             allowedExts=exts,
                             fileGroups=groups,
                             unambiguous=True)
        return True
    except fslpath.PathError:
        return False


def main(argv=None):
    """Tests whether an image path exists, printing ``'1'`` if it does or
    ``'0'`` if it doesn't.
    """
    if argv is None:
        argv = sys.argv[1:]

    # emulate old fslio/imtest - always return 0
    if len(argv) != 1:
        print('0')
        return 0

    if imtest(argv[0]):
        print('1')
    else:
        print('0')

    return 0


if __name__ == '__main__':
    sys.exit(main())
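
For reference, a minimal usage sketch (the ``fsl.scripts.imtest`` module path and the image path below are assumptions for illustration, not taken from this diff):

    # Minimal sketch: query an image without knowing its suffix.
    from fsl.scripts.imtest import imtest

    # True if e.g. T1.nii, T1.nii.gz, or a T1.img/T1.hdr pair exists
    # at this (hypothetical) location.
    print(imtest('/data/subj1/T1'))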
#!/usr/bin/env python
#
# remove_ext.py - Remove file extensions from NIFTI image paths
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#

import sys

import fsl.utils.path as fslpath

usage = """Usage: remove_ext <list of image paths to remove extension from>
""".strip()

# This list is defined in the
# fsl.data.image class, but is duplicated
# here for performance (to avoid import of
# nibabel/numpy/etc).
exts = ['.nii.gz', '.nii',
        '.img',    '.hdr',
        '.img.gz', '.hdr.gz',
        '.mnc',    '.mnc.gz']
"""List of file extensions that are removed by ``remove_ext``. """


def main(argv=None):
    """Removes file extensions from all paths which are specified on the
    command line.
    """
    if argv is None:
        argv = sys.argv[1:]

    if len(argv) < 1:
        print(usage)
        return 1

    removed = []

    for path in argv:
        removed.append(fslpath.removeExt(path, exts))

    print(' '.join(removed))

    return 0


if __name__ == '__main__':
    sys.exit(main())
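
As an illustration, the script's ``main`` function can be driven directly (file names are hypothetical; ``fsl.scripts.remove_ext`` is the assumed module path):

    from fsl.scripts.remove_ext import main

    # Prints the arguments with any recognised extension stripped,
    # e.g. "func struct mask"
    main(['func.nii.gz', 'struct.nii', 'mask'])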
#!/usr/bin/env python
#
# resample_image.py - Script to resample an image
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
"""This module defines the ``resample_image`` script, for resampling
a NIfTI image.
"""

import textwrap as tw
import sys
import argparse

import numpy as np

import fsl.utils.parse_data as parse_data
import fsl.utils.image.resample as resample
import fsl.data.image as fslimage


def intlist(val):
    """Turn a string of comma-separated ints into a list of ints. """
    return [int(v) for v in val.split(',')]


def floatlist(val):
    """Turn a string of comma-separated floats into a list of floats. """
    return [float(v) for v in val.split(',')]


def sanitiseList(parser, vals, img, arg):
    """Make sure that ``vals`` has the same number of elements as ``img`` has
    dimensions. Used to sanitise the ``--shape`` and ``--dim`` options.
    """

    if vals is None:
        return vals

    nvals = len(vals)

    if nvals < 3:
        parser.error('At least three values are '
                     'required for {}'.format(arg))

    if nvals > img.ndim:
        parser.error('Input only has {} dimensions - too many values '
                     'specified for {}'.format(img.ndim, arg))

    if nvals < img.ndim:
        vals = list(vals) + list(img.shape[nvals:])

    return vals


ARGS = {
    'input'     : ('input',),
    'output'    : ('output',),
    'shape'     : ('-s',  '--shape'),
    'dim'       : ('-d',  '--dim'),
    'reference' : ('-r',  '--reference'),
    'interp'    : ('-i',  '--interp'),
    'origin'    : ('-o',  '--origin'),
    'dtype'     : ('-dt', '--dtype'),
    'smooth'    : ('-n',  '--nosmooth')}


OPTS = {
    'input'     : dict(type=parse_data.Image),
    'output'    : dict(type=parse_data.ImageOut),
    'reference' : dict(type=parse_data.Image, metavar='IMAGE'),
    'shape'     : dict(type=intlist,   metavar=('X,Y,Z,...')),
    'dim'       : dict(type=floatlist, metavar=('X,Y,Z,...')),
    'interp'    : dict(choices=('nearest', 'linear', 'cubic'),
                       default='linear'),
    'origin'    : dict(choices=('centre', 'corner'), default='centre'),
    'dtype'     : dict(choices=('char', 'short', 'int', 'float', 'double')),
    'smooth'    : dict(dest='smooth', action='store_false')}


HELPS = {
    'input'     : 'Input image',
    'output'    : 'Output image',
    'shape'     : 'Output shape',
    'dim'       : 'Output voxel dimensions',
    'reference' : 'Resample input to the space of this reference image '
                  '(overrides --origin)',
    'interp'    : 'Interpolation (default: linear)',
    'origin'    : 'Resampling origin (default: centre)',
    'dtype'     : 'Data type (default: data type of input image)',
    'smooth'    : 'Do not smooth image when downsampling'}


DESC = tw.dedent("""
Resample an image to different dimensions.
""").strip()


DEST_DESC = tw.dedent("""
Specify the resampling destination space using one of the following
options. Note that the --reference option will cause the field-of-view
of the input image to be changed to that of the reference image.
""").strip()


USAGE = 'resample_image (--shape|--dim|--reference) [options] input output'


INTERPS = {'nearest' : 0,
           'linear'  : 1,
           'cubic'   : 3}
DTYPES  = {'char'   : np.uint8,
           'short'  : np.int16,
           'int'    : np.int32,
           'float'  : np.float32,
           'double' : np.float64}


def parseArgs(argv):
    """Parses command-line arguments.

    :arg argv: Sequence of command-line arguments
    :returns:  An ``argparse.Namespace`` object containing parsed arguments.
    """

    parser = argparse.ArgumentParser(prog='resample_image',
                                     usage=USAGE,
                                     description=DESC)
    dest   = parser.add_argument_group('Resampling destination', DEST_DESC)
    dest   = dest.add_mutually_exclusive_group(required=True)

    for a in ('input', 'output', 'interp', 'origin',
              'dtype', 'smooth'):
        parser.add_argument(*ARGS[a], help=HELPS[a], **OPTS[a])

    for a in ('shape', 'dim', 'reference'):
        dest.add_argument(*ARGS[a], help=HELPS[a], **OPTS[a])

    if len(argv) == 0:
        parser.print_help()
        sys.exit(0)

    args        = parser.parse_args(argv)
    args.interp = INTERPS[ args.interp]
    args.dtype  = DTYPES.get(args.dtype, args.input.dtype)
    args.shape  = sanitiseList(parser, args.shape, args.input, 'shape')
    args.dim    = sanitiseList(parser, args.dim,   args.input, 'dim')

    if (args.reference is not None) and \
       (args.input.ndim     > 3)    and \
       (args.reference.ndim > 3):
        print('Reference and image are both >3D - only '
              'resampling along the spatial dimensions.')

    return args


def main(argv=None):
    """Entry point for ``resample_image``. Parses arguments, resamples the
    input image, and saves it to the specified output file.

    :arg argv: Sequence of command-line arguments. If not provided, taken
               from ``sys.argv``.
    """

    if argv is None:
        argv = sys.argv[1:]

    args      = parseArgs(argv)
    reskwargs = {
        'dtype'  : args.dtype,
        'order'  : args.interp,
        'smooth' : args.smooth,
        'origin' : args.origin}

    # One of these is guaranteed to be set
    if args.shape is not None:
        func    = resample.resample
        resargs = (args.input, args.shape)

    elif args.dim is not None:
        func    = resample.resampleToPixdims
        resargs = (args.input, args.dim)

    elif args.reference is not None:
        func    = resample.resampleToReference
        resargs = (args.input, args.reference)

    resampled, xform = func(*resargs, **reskwargs)

    if args.reference is None:
        hdr = args.input.header
    else:
        hdr   = args.reference.header
        xform = None

    resampled = fslimage.Image(resampled, xform=xform, header=hdr)

    # Adjust the pixdims of the
    # higher dimensions if they
    # have been resampled
    if len(resampled.shape) > 3:
        oldPixdim = args.input.pixdim[3:]
        oldShape  = args.input.shape[ 3:]
        newShape  = resampled .shape[ 3:]
        for i, (p, o, n) in enumerate(zip(oldPixdim, oldShape, newShape), 4):
            resampled.header['pixdim'][i] = p * o / n

    resampled.save(args.output)

    return 0


if __name__ == '__main__':
    sys.exit(main())
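
For illustration, a sketch of a typical invocation via the ``main`` entry point (file names are hypothetical; ``fsl.scripts.resample_image`` is the assumed module path):

    from fsl.scripts.resample_image import main

    # Resample input.nii.gz to 2mm isotropic voxels, with the
    # default linear interpolation and centre origin.
    main(['--dim', '2,2,2', 'input.nii.gz', 'output.nii.gz'])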
......@@ -10,6 +10,7 @@
import os
import sys
import glob
import hashlib
import shutil
import fnmatch
import logging
......@@ -20,14 +21,12 @@ import os.path as op
import numpy as np
import nibabel as nib
from six import StringIO
from io import StringIO
try: from unittest import mock
except ImportError: import mock
from unittest import mock
import fsl.data.image as fslimage
from fsl.utils.tempdir import tempdir
from fsl.utils.tempdir import tempdir
from fsl.utils.platform import platform as fslplatform
......@@ -35,7 +34,7 @@ logging.getLogger().setLevel(logging.WARNING)
@contextlib.contextmanager
def mockFSLDIR():
def mockFSLDIR(**kwargs):

    oldfsldir    = fslplatform.fsldir
    oldfsldevdir = fslplatform.fsldevdir

......@@ -45,6 +44,15 @@ def mockFSLDIR():
        fsldir = op.join(td, 'fsl')
        bindir = op.join(fsldir, 'bin')
        os.makedirs(bindir)

        for subdir, files in kwargs.items():
            subdir = op.join(fsldir, subdir)
            if not op.isdir(subdir):
                os.makedirs(subdir)
            for fname in files:
                fname = op.join(subdir, fname)
                touch(fname)
                if subdir == bindir:
                    os.chmod(fname, 0o755)

        fslplatform.fsldir    = fsldir
        fslplatform.fsldevdir = None

......@@ -62,7 +70,7 @@ def touch(fname):
        pass


class CaptureStdout(object):
class CaptureStdout:
    """Context manager which captures stdout and stderr. """

    def __init__(self):

......@@ -81,6 +89,7 @@ class CaptureStdout(object):
        sys.stdout = self.__mock_stdout
        sys.stderr = self.__mock_stderr

        return self

    def __exit__(self, *args, **kwargs):

......@@ -139,6 +148,8 @@ def testdir(contents=None, suffix=""):
            shutil.rmtree(self.testdir)

    return ctx(contents)

testdir.__test__ = False
def make_dummy_files(paths):
    """Creates dummy files for all of the given paths. """

......@@ -278,7 +289,8 @@ def make_mock_feat_analysis(featdir,
                            copes=True,
                            zstats=True,
                            residuals=True,
                            clustMasks=True):
                            clustMasks=True,
                            zfstats=True):

    if xform is None:
        xform = np.eye(4)

......@@ -311,6 +323,7 @@ def make_mock_feat_analysis(featdir,
        data = np.ravel_multi_index(data, shape)
        data = data.reshape(list(shape) + [1]).repeat(timepoints, axis=3)
        data[..., :] += range(i, i + timepoints)
        data = data.astype(np.int32)

        img = nib.nifti1.Nifti1Image(data, xform)

......@@ -335,6 +348,11 @@ def make_mock_feat_analysis(featdir,
        otherFiles .extend(files)
        otherShapes.extend([shape] * len(files))

    if zfstats:
        files = glob.glob(op.join(featdir, 'stats', 'zfstat*nii.gz'))
        otherFiles .extend(files)
        otherShapes.extend([shape] * len(files))

    if residuals:
        files = glob.glob(op.join(featdir, 'stats', 'res4d.nii.gz'))
        otherFiles .extend(files)

......@@ -352,6 +370,56 @@ def make_mock_feat_analysis(featdir,
    return featdir


def make_mock_melodic_analysis(basedir, shape4D, ntimepoints, xform=None):

    if xform is None:
        xform = np.eye(4)

    ncomps   = shape4D[-1]
    halftime = int(np.floor(ntimepoints / 2))

    os.makedirs(basedir)

    make_random_image(op.join(basedir, 'melodic_IC.nii.gz'),
                      dims=shape4D,
                      xform=xform)

    mix   = np.random.randint(1, 255, (ntimepoints, ncomps))
    ftmix = np.random.randint(1, 255, (halftime,    ncomps))

    np.savetxt(op.join(basedir, 'melodic_mix'),   mix)
    np.savetxt(op.join(basedir, 'melodic_FTmix'), ftmix)


def make_mock_dtifit_analysis(basedir, shape3D, basename='dti',
                              xform=None, tensor=False):
    if xform is None:
        xform = np.eye(4)

    os.makedirs(basedir)

    shape4D = tuple(shape3D) + (3,)

    def mk(ident, shp):
        make_random_image(
            op.join(basedir, '{}_{}.nii.gz'.format(basename, ident)),
            shp,
            xform)

    mk('V1', shape4D)
    mk('V2', shape4D)
    mk('V3', shape4D)
    mk('L1', shape3D)
    mk('L2', shape3D)
    mk('L3', shape3D)
    mk('S0', shape3D)
    mk('MD', shape3D)
    mk('MO', shape3D)
    mk('FA', shape3D)

    if tensor:
        mk('tensor', tuple(shape3D) + (6,))


def make_random_mask(filename, shape, xform, premask=None, minones=1):
    """Make a random binary mask image. """

......@@ -372,3 +440,10 @@ def make_random_mask(filename, shape, xform, premask=None, minones=1):
    img.save(filename)

    return img


def sha256(filename):
    hashobj = hashlib.sha256()

    with open(filename, 'rb') as f:
        hashobj.update(f.read())

    return hashobj.hexdigest()
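
A quick sketch of how this helper might be used in a test (file names are hypothetical):

    # Compare two files byte-for-byte via their SHA-256 digests. Note
    # that the helper reads the whole file into memory, which is fine
    # for small test images.
    assert sha256('copy.nii.gz') == sha256('original.nii.gz')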
......@@ -14,8 +14,8 @@ import pytest
import fsl.utils.assertions as assertions
import fsl.utils.tempdir as tempdir
from . import make_random_image
from . import testdir
from fsl.tests import make_random_image
from fsl.tests import testdir
def test_assertFileExists():

......@@ -160,14 +160,14 @@ def test_assertIsMelodicDir():
        ('analysis.ica', [                     'melodic_mix', 'melodic_FTmix'], False),
        ('analysis.ica', ['melodic_IC.nii.gz',                'melodic_FTmix'], False),
        ('analysis.ica', ['melodic_IC.nii.gz', 'melodic_mix'                 ], False),
        ('analysis',     ['melodic_IC.nii.gz', 'melodic_mix', 'melodic_FTmix'], False),
        ('analysis',     ['melodic_oIC.nii.gz', 'melodic_mix', 'melodic_FTmix'], False),
        ('analysis',     ['melodic_IC.nii.gz', 'melodic_mix', 'melodic_FTmix'], True),
        ('analysis',     [                     'melodic_mix', 'melodic_FTmix'], False),
    ]

    for dirname, paths, expected in tests:
        with testdir(paths, dirname):
            if expected:
                assertions.assertIsMelodicDir(dirname)
                assertions.assertIsMelodicDir('.')
            else:
                with pytest.raises(AssertionError):
                    assertions.assertIsMelodicDir(dirname)
......@@ -13,13 +13,15 @@ import os
import os.path as op
import numpy as np
import mock
from unittest import mock
import pytest
import tests
import fsl.utils.transform as transform
import fsl.data.atlases as atlases
import fsl.data.image as fslimage
import fsl.tests as tests
import fsl.utils.image.resample as resample
import fsl.data.atlases as atlases
import fsl.data.image as fslimage
import fsl.transform.affine as affine
datadir = op.join(op.dirname(__file__), 'testdata')
......@@ -39,7 +41,8 @@ dummy_atlas_desc = """<?xml version="1.0" encoding="ISO-8859-1"?>
  <header>
    <name>{name}</name>
    <shortname>{shortname}</shortname>
    <type>Label</type>
    <type>{atlastype}</type>
    {extraheader}
  <images>
    <imagefile>/{shortname}/{filename}</imagefile>
    <summaryimagefile>/{shortname}/My{filename}</summaryimagefile>
......@@ -51,7 +54,8 @@ dummy_atlas_desc = """<?xml version="1.0" encoding="ISO-8859-1"?>
  </data>
</atlas>
"""


def _make_dummy_atlas(savedir, name, shortName, filename):
def _make_dummy_atlas(
        savedir, name, shortName, filename, atlastype='Label', extraheader=''):
    mladir     = op.join(savedir, shortName)
    mlaxmlfile = op.join(savedir, '{}.xml'.format(shortName))
    mlaimgfile = op.join(savedir, shortName, '{}.nii.gz'.format(filename))
......@@ -69,7 +73,9 @@ def _make_dummy_atlas(savedir, name, shortName, filename):
        desc = dummy_atlas_desc.format(
            name=name,
            shortname=shortName,
            filename=filename)
            filename=filename,
            atlastype=atlastype,
            extraheader=extraheader)
        f.write(desc)

    return mlaxmlfile

......@@ -98,6 +104,8 @@ def test_AtlasDescription():
    tal  = registry.getAtlasDescription('talairach')
    cort = registry.getAtlasDescription('harvardoxford-cortical')

    assert str(tal)  == 'AtlasDescription(talairach)'
    assert str(cort) == 'AtlasDescription(harvardoxford-cortical)'

    assert tal.atlasID == 'talairach'
    assert tal.name    == 'Talairach Daemon Labels'

......@@ -139,6 +147,26 @@ def test_AtlasDescription():
        registry.getAtlasDescription('non-existent-atlas')
def test_StatisticHeader():
    with tests.testdir() as testdir:
        hdr     = '<statistic>T</statistic>' \
                  '<units></units>'          \
                  '<precision>3</precision>' \
                  '<upper>75</upper>'
        xmlfile = _make_dummy_atlas(testdir,
                                    'statlas',
                                    'STA',
                                    'StAtlas',
                                    atlastype='Statistic',
                                    extraheader=hdr)

        desc = atlases.AtlasDescription(xmlfile, 'StAtlas')

        assert desc.atlasType == 'statistic'
        assert desc.statistic == 'T'
        assert desc.units     == ''
        assert desc.precision == 3
        assert desc.lower     == 0
        assert desc.upper     == 75
def test_add_remove_atlas():

......@@ -224,8 +252,7 @@ def test_load_atlas():
    reg = atlases.registry
    reg.rescanAtlases()

    probatlas    = reg.loadAtlas('harvardoxford-cortical',
                                 indexed=True, calcRange=False, loadData=False)
    probatlas    = reg.loadAtlas('harvardoxford-cortical')
    probsumatlas = reg.loadAtlas('harvardoxford-cortical', loadSummary=True)
    lblatlas     = reg.loadAtlas('talairach')

......@@ -234,13 +261,32 @@
    assert isinstance(lblatlas, atlases.LabelAtlas)
def test_get():

    reg = atlases.registry
    reg.rescanAtlases()

    probatlas = reg.loadAtlas('harvardoxford-cortical')
    lblatlas  = reg.loadAtlas('talairach')

    for atlas in (probatlas, lblatlas):
        for idx, label in enumerate(atlas.desc.labels[:10]):
            target = probatlas[..., idx] if atlas is probatlas \
                     else lblatlas.data == label.value
            assert (target == atlas.get(label).data).all()
            assert label.name == atlas.get(label).name
            assert (target == atlas.get(index=label.index).data).all()
            assert (target == atlas.get(value=label.value).data).all()
            assert (target == atlas.get(name=label.name).data).all()
            if atlas is lblatlas:
                target = target * label.value
                assert (target == atlas.get(value=label.value, binary=False).data).all()
def test_find():

    reg = atlases.registry
    reg.rescanAtlases()

    probatlas    = reg.loadAtlas('harvardoxford-cortical',
                                 indexed=True, calcRange=False, loadData=False)
    probatlas    = reg.loadAtlas('harvardoxford-cortical')
    probsumatlas = reg.loadAtlas('harvardoxford-cortical', loadSummary=True)
    lblatlas     = reg.loadAtlas('talairach')

......@@ -251,16 +297,31 @@ def test_find():
            assert atlas     .find(value=label.value) == label
            assert atlas     .find(index=label.index) == label
            assert atlas     .find(name=label.name)   == label
            assert atlas.desc.find(value=label.value) == label
            assert atlas.desc.find(index=label.index) == label
            assert atlas.desc.find(name=label.name)   == label

            if atlas is not lblatlas:
                # lblatlas has a lot of very similar label names
                assert atlas     .find(name=label.name[:-2]) == label
                assert atlas.desc.find(name=label.name[:-2]) == label

        with pytest.raises(ValueError):
            atlas.find()
        with pytest.raises(ValueError):
            atlas.find(index=1, value=1)
        with pytest.raises(ValueError):
            atlas.find(index=1, name=1)
        with pytest.raises(ValueError):
            atlas.find(value=1, name=1)

        with pytest.raises(IndexError):
            atlas.find(index=len(labels))
        with pytest.raises(IndexError):
            atlas.find(name='InvalidROI')
        with pytest.raises(IndexError):
            atlas.find(name='')

        maxval = max([l.value for l in labels])
        with pytest.raises(KeyError):
......@@ -272,8 +333,7 @@ def test_prepareMask():
    reg = atlases.registry
    reg.rescanAtlases()

    probatlas    = reg.loadAtlas('harvardoxford-cortical',
                                 indexed=True, loadData=False, calcRange=False)
    probatlas    = reg.loadAtlas('harvardoxford-cortical')
    probsumatlas = reg.loadAtlas('harvardoxford-cortical', loadSummary=True)
    lblatlas     = reg.loadAtlas('talairach')

......@@ -286,14 +346,14 @@ def test_prepareMask():
            np.array(np.random.random(ashape), dtype=np.float32),
            xform=atlas.voxToWorldMat)

        goodmask2, xf = goodmask1.resample(m2shape)
        goodmask2, xf = resample.resample(goodmask1, m2shape)
        goodmask2     = fslimage.Image(goodmask2, xform=xf)

        wrongdims  = fslimage.Image(
            np.random.random(list(ashape) + [2]))
        wrongspace = fslimage.Image(
            np.random.random((20, 20, 20)),
            xform=transform.concat(atlas.voxToWorldMat, np.diag([2, 2, 2, 1])))
            xform=affine.concat(atlas.voxToWorldMat, np.diag([2, 2, 2, 1])))

        with pytest.raises(atlases.MaskError):
            atlas.prepareMask(wrongdims)
......@@ -11,12 +11,13 @@ import numpy as np
import pytest
import fsl.data.atlases as fslatlases
import fsl.data.image as fslimage
import fsl.utils.transform as transform
import fsl.utils.cache as cache
import fsl.data.atlases as fslatlases
import fsl.data.image as fslimage
import fsl.transform.affine as affine
import fsl.utils.image.resample as resample
import fsl.utils.cache as cache
from . import (testdir, make_random_mask)
from fsl.tests import (testdir, make_random_mask)
pytestmark = pytest.mark.fsltest
......@@ -40,18 +41,16 @@ _atlases = cache.Cache()
def _get_atlas(atlasID, res, summary=False):
atlas = _atlases.get((atlasID, res, summary), default=None)
if atlas is None:
atlas = fslatlases.loadAtlas(atlasID,
loadSummary=summary,
resolution=res)
# We need some atlases to be loaded into memory,
# so we can use boolean-mask-based indexing
if summary or atlasID in ('talairach', 'striatum-structural',
'jhu-labels', 'smatt'):
kwargs = {}
else:
kwargs = {'loadData' : False,
'calcRange' : False,
'indexed' : True}
atlas.data
atlas = fslatlases.loadAtlas(atlasID,
loadSummary=summary,
resolution=res,
**kwargs)
_atlases.put((atlasID, res, summary), atlas)
return atlas
......@@ -85,7 +84,7 @@ def _get_zero_mask(aimg):
elif isinstance(aimg, fslatlases.ProbabilisticAtlas):
# Keep memory usage down
zmask = np.ones(aimg.shape[:3], dtype=np.bool)
zmask = np.ones(aimg.shape[:3], dtype=bool)
for vol in range(aimg.shape[-1]):
zmask = np.logical_and(zmask, aimg[..., vol] == 0)
......@@ -155,7 +154,7 @@ def _gen_coord_voxel_query(atlas, qtype, qin, **kwargs):
dlo = (0, 0, 0)
dhi = atlas.shape
else:
dlo, dhi = transform.axisBounds(atlas.shape, atlas.voxToWorldMat)
dlo, dhi = affine.axisBounds(atlas.shape, atlas.voxToWorldMat)
dlen = [hi - lo for lo, hi in zip(dlo, dhi)]
......@@ -190,7 +189,7 @@ def _gen_coord_voxel_query(atlas, qtype, qin, **kwargs):
coords = np.array(coords, dtype=dtype)
if not voxel:
coords = transform.transform(coords, atlas.voxToWorldMat)
coords = affine.transform(coords, atlas.voxToWorldMat)
return tuple([dtype(c) for c in coords])
......@@ -200,7 +199,7 @@ def _eval_coord_voxel_query(atlas, query, qtype, qin):
voxel = qtype == 'voxel'
if voxel: vx, vy, vz = query
else: vx, vy, vz = transform.transform(query, atlas.worldToVoxMat)
else: vx, vy, vz = affine.transform(query, atlas.worldToVoxMat)
vx, vy, vz = [int(round(v)) for v in [vx, vy, vz]]
......@@ -218,8 +217,8 @@ def _eval_coord_voxel_query(atlas, query, qtype, qin):
elif qin == 'out':
expval = []
assert atlas.proportions( query, voxel=voxel) == expval
assert atlas.coordProportions(query, voxel=voxel) == expval
assert atlas.values( query, voxel=voxel) == expval
assert atlas.coordValues(query, voxel=voxel) == expval
if isinstance(atlas, fslatlases.LabelAtlas): evalLabel()
elif isinstance(atlas, fslatlases.ProbabilisticAtlas): evalProb()
......@@ -257,7 +256,8 @@ def _gen_mask_query(atlas, qtype, qin, maskres):
# aggresively to make sure there
# is no overlap between the different
# resolutions
mask, xform = mask.resample(a.shape[:3], dtype=np.float32, order=1)
mask, xform = resample.resample(
mask, a.shape[:3], dtype=np.float32, order=1)
mask[mask < 1.0] = 0
mask[a_zmask == 0] = 0
......@@ -280,9 +280,10 @@ def _eval_mask_query(atlas, query, qtype, qin):
if maskres == res:
rmask = mask[:]
else:
rmask = mask.resample(atlas.shape[:3], dtype=np.float32, order=0)[0]
rmask = resample.resample(
mask, atlas.shape[:3], dtype=np.float32, order=0)[0]
rmask = np.array(rmask, dtype=np.bool)
rmask = np.array(rmask, dtype=bool)
def evalLabel():
......@@ -341,13 +342,13 @@ def _eval_mask_query(atlas, query, qtype, qin):
if qin == 'out':
with pytest.raises(fslatlases.MaskError):
atlas.maskProportions(mask)
atlas.maskValues(mask)
with pytest.raises(fslatlases.MaskError):
atlas.proportions( mask)
atlas.values( mask)
return
props = atlas. proportions(mask)
props2 = atlas.maskProportions(mask)
props = atlas. values(mask)
props2 = atlas.maskValues(mask)
assert np.all(np.isclose(props, props2))
#!/usr/bin/env python
#
# test_bids.py -
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#

import json
import os.path as op
import itertools as it
from pathlib import Path

import pytest

from fsl.utils.tempdir import tempdir

import fsl.utils.bids as fslbids


def test_parseFilename():

    badtests = ['bad_file.txt']

    for test in badtests:
        with pytest.raises(ValueError):
            fslbids.parseFilename(test)

    tests = [
        ('sub-01_ses-01_t1w.nii.gz',
         ({'sub' : '01', 'ses' : '01'}, 't1w')),
        ('a-1_b-2_c-3_d-4_e.json',
         ({'a' : '1', 'b' : '2', 'c' : '3', 'd' : '4'}, 'e')),
    ]

    for filename, expect in tests:
        assert fslbids.parseFilename(filename) == expect


def test_isBIDSDir():
    with tempdir():
        assert not fslbids.isBIDSDir('.')
    with tempdir():
        Path('dataset_description.json').touch()
        assert fslbids.isBIDSDir('.')


def test_inBIDSDir():
    with tempdir():
        Path('a/b/c').mkdir(parents=True)
        Path('dataset_description.json').touch()
        assert fslbids.inBIDSDir(Path('.'))
        assert fslbids.inBIDSDir(Path('a'))
        assert fslbids.inBIDSDir(Path('a/b'))
        assert fslbids.inBIDSDir(Path('a/b/c'))
    with tempdir():
        Path('a/b/c').mkdir(parents=True)
        assert not fslbids.inBIDSDir(Path('.'))
        assert not fslbids.inBIDSDir(Path('a'))
        assert not fslbids.inBIDSDir(Path('a/b'))
        assert not fslbids.inBIDSDir(Path('a/b/c'))


def test_isBIDSFile():
    goodfiles = [
        Path('sub-01_ses-01_t1w.nii.gz'),
        Path('sub-01_ses-01_t1w.nii'),
        Path('sub-01_ses-01_t1w.json'),
        Path('a-1_b-2_c-3_d-4_e.nii.gz'),
        Path('sub-01_ses-01_t1w.txt'),
    ]
    badfiles = [
        Path('sub-01_ses-01.nii.gz'),
        Path('sub-01_ses-01_t1w'),
        Path('sub-01_ses-01_t1w.'),
        Path('sub_ses-01_t1w.nii.gz'),
        Path('sub-01_ses_t1w.nii.gz'),
    ]

    with tempdir():
        Path('dataset_description.json').touch()
        for f in goodfiles: assert     fslbids.isBIDSFile(f)
        for f in badfiles:  assert not fslbids.isBIDSFile(f)
    with tempdir():
        for f in it.chain(goodfiles, badfiles):
            assert not fslbids.isBIDSFile(f)


def test_loadMetadata():
    dd    = Path('dataset_description.json')
    t1    = Path('sub-01/func/sub-01_task-stim_bold.nii.gz')
    json1 = Path('sub-01/func/sub-01_task-stim_bold.json')
    json2 = Path('sub-01/sub-01_bold.json')
    json3 = Path('sub-01_t1w.json')
    json4 = Path('sub-01/task-stim_bold.json')

    meta1 = {'a' : '1',   'b' : '2'}
    meta2 = {'a' : '10',  'c' : '3'}
    meta3 = {'a' : '109', 'b' : '99'}
    meta4 = {'c' : '9',   'd' : '5'}

    with tempdir():
        dd.touch()
        Path(op.dirname(t1)).mkdir(parents=True)
        t1.touch()

        assert fslbids.loadMetadata(t1) == {}
        json1.write_text(json.dumps(meta1))
        assert fslbids.loadMetadata(t1) == meta1
        json2.write_text(json.dumps(meta2))
        assert fslbids.loadMetadata(t1) == {**meta2, **meta1}
        json3.write_text(json.dumps(meta3))
        assert fslbids.loadMetadata(t1) == {**meta2, **meta1}
        json4.write_text(json.dumps(meta4))
        assert fslbids.loadMetadata(t1) == {**meta4, **meta2, **meta1}


def test_loadMetadata_control_characters():
    dd     = Path('dataset_description.json')
    t1     = Path('sub-01/func/sub-01_task-stim_bold.nii.gz')
    json1  = Path('sub-01/func/sub-01_task-stim_bold.json')
    meta1  = {"a" : "1", "b" : "2\x19\x20"}
    smeta1 = '{"a" : "1", "b" : "2\x19\x20"}'

    with tempdir():
        dd.touch()
        Path(op.dirname(t1)).mkdir(parents=True)
        t1.touch()

        assert fslbids.loadMetadata(t1) == {}
        json1.write_text(smeta1)
        assert fslbids.loadMetadata(t1) == meta1


def test_loadMetadata_symlinked():
    ddreal = Path('a')
    t1real = Path('b')
    j1real = Path('c')
    j2real = Path('d')
    j3real = Path('e')
    j4real = Path('f')

    dd    = Path('data/dataset_description.json')
    t1    = Path('data/sub-01/func/sub-01_task-stim_bold.nii.gz')
    json1 = Path('data/sub-01/func/sub-01_task-stim_bold.json')
    json2 = Path('data/sub-01/sub-01_bold.json')
    json3 = Path('data/sub-01_t1w.json')
    json4 = Path('data/sub-01/task-stim_bold.json')

    meta1 = {'a' : '1',   'b' : '2'}
    meta2 = {'a' : '10',  'c' : '3'}
    meta3 = {'a' : '109', 'b' : '99'}
    meta4 = {'c' : '9',   'd' : '5'}

    with tempdir():
        ddreal.touch()
        t1real.touch()
        j1real.write_text(json.dumps(meta1))
        j2real.write_text(json.dumps(meta2))
        j3real.write_text(json.dumps(meta3))
        j4real.write_text(json.dumps(meta4))

        Path(op.dirname(t1)).mkdir(parents=True)

        dd   .symlink_to(op.join('..', ddreal))
        t1   .symlink_to(op.join('..', '..', '..', t1real))
        json1.symlink_to(op.join('..', '..', '..', j1real))
        json2.symlink_to(op.join('..', '..', j2real))
        json3.symlink_to(op.join('..', j3real))
        json4.symlink_to(op.join('..', '..', j4real))

        assert fslbids.loadMetadata(t1) == {**meta4, **meta2, **meta1}
#!/usr/bin/env python
#
# test_bitmap.py -
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#

import numpy as np
import pytest

import fsl.utils.tempdir as tempdir
import fsl.data.bitmap as fslbmp


@pytest.mark.piltest
def test_bitmap():
    from PIL import Image

    nchannels = (1, 3, 4)

    with tempdir.tempdir():
        for nch in nchannels:
            data  = np.random.randint(0, 255, (100, 200, nch), dtype=np.uint8)
            img   = Image.fromarray(data.squeeze())
            fname = 'image.png'
            img.save(fname)

            bmp1 = fslbmp.Bitmap(fname)
            bmp2 = fslbmp.Bitmap(data)

            assert bmp1.name       == fname
            assert bmp1.dataSource == fname
            assert bmp1.shape      == (200, 100, nch)
            assert bmp2.shape      == (200, 100, nch)

            repr(bmp1)
            hash(bmp1)

            assert np.all(bmp1.data == np.fliplr(data.transpose(1, 0, 2)))
            assert np.all(bmp2.data == np.fliplr(data.transpose(1, 0, 2)))


@pytest.mark.piltest
def test_bitmap_asImage():
    from PIL import Image

    with tempdir.tempdir():
        d3 = np.random.randint(0, 255, (100, 200, 3), dtype=np.uint8)
        d4 = np.random.randint(0, 255, (100, 200, 4), dtype=np.uint8)

        img3 = Image.fromarray(d3, mode='RGB')
        img4 = Image.fromarray(d4, mode='RGBA')
        img1 = img3.convert(mode='P')

        img3.save('rgb.png')
        img4.save('rgba.png')
        img1.save('p.png')

        bmp3 = fslbmp.Bitmap('rgb.png')
        bmp4 = fslbmp.Bitmap('rgba.png')
        bmp1 = fslbmp.Bitmap('p.png')

        i3 = bmp3.asImage()
        i4 = bmp4.asImage()
        i1 = bmp1.asImage()

        assert i3.shape == (200, 100, 1)
        assert i4.shape == (200, 100, 1)
        assert i1.shape == (200, 100, 1)
        assert i3.nvals == 3
        assert i4.nvals == 4
        assert i1.nvals == 3
......@@ -113,7 +113,55 @@ def test_expiry():
    with pytest.raises(cache.Expired):
        c.get(0)

    with pytest.raises(cache.Expired):
        c.get(1)
    assert c.get(1, default='default') == 'default'

    # And that the cache is empty
    assert len(c) == 0


def test_lru():
    c = cache.Cache(maxsize=3, lru=True)

    c[0] = '0'
    c[1] = '1'
    c[2] = '2'
    c[3] = '3'

    # normal behaviour - first inserted
    # is dropped
    with pytest.raises(KeyError):
        assert c.get(0)

    # lru behaviour - oldest accessed is
    # dropped
    c[1]
    c[4] = '4'

    with pytest.raises(KeyError):
        c[2]

    c[1]
    c[3]
    c[4]
    assert len(c) == 3


def test_accessors():
    c = cache.Cache(maxsize=3)

    c[0] = '0'
    c[1] = '1'
    c[2] = '2'
    c[3] = '3'

    assert list(c.keys())   == [ 1,   2,   3]
    assert list(c.values()) == ['1', '2', '3']
    assert list(c.items())  == [(1, '1'), (2, '2'), (3, '3')]

    assert 0 not in c
    assert 1     in c
    assert 2     in c
    assert 3     in c
from fsl.data import cifti

import os.path as op
import numpy as np
import nibabel as nib
from numpy import testing
import fsl.tests as tests
from nibabel.cifti2 import cifti2_axes


def volumetric_brain_model():
    mask = np.random.randint(2, size=(10, 10, 10)) > 0
    return cifti2_axes.BrainModelAxis.from_mask(mask, affine=np.eye(4))


def surface_brain_model():
    mask = np.random.randint(2, size=100) > 0
    return cifti2_axes.BrainModelAxis.from_mask(mask, name='cortex')


def volumetric_parcels(return_mask=False):
    mask = np.random.randint(5, size=(10, 10, 10))
    axis = cifti2_axes.ParcelsAxis(
        [f'vol_{idx}' for idx in range(1, 5)],
        voxels=[np.stack(np.where(mask == idx), axis=-1) for idx in range(1, 5)],
        vertices=[{} for _ in range(1, 5)],
        volume_shape=mask.shape,
        affine=np.eye(4),
    )
    if return_mask:
        return axis, mask
    else:
        return axis


def surface_parcels(return_mask=False):
    mask = np.random.randint(5, size=100)
    axis = cifti2_axes.ParcelsAxis(
        [f'surf_{idx}' for idx in range(1, 5)],
        voxels=[np.zeros((0, 3), dtype=int) for _ in range(1, 5)],
        vertices=[{'CIFTI_STRUCTURE_CORTEX': np.where(mask == idx)[0]} for idx in range(1, 5)],
        nvertices={'CIFTI_STRUCTURE_CORTEX': 100},
    )
    if return_mask:
        return axis, mask
    else:
        return axis


def gen_data(axes):
    return np.random.randn(*(5 if ax is None else len(ax) for ax in axes))


def test_read_gifti():
    testdir   = op.join(op.dirname(__file__), 'testdata')
    shapefile = op.join(testdir, 'example.shape.gii')
    ref_data  = nib.load(shapefile).darrays[0].data

    data = cifti.load(shapefile)
    assert isinstance(data, cifti.DenseCifti)
    assert data.arr.shape == (642, )
    testing.assert_equal(data.arr, ref_data)
    testing.assert_equal(data.brain_model_axis.vertex, np.arange(642))
    assert len(data.brain_model_axis.nvertices) == 1
    assert data.brain_model_axis.nvertices['CIFTI_STRUCTURE_OTHER'] == 642

    data = cifti.load(shapefile, mask_values=(ref_data[0], ))
    assert isinstance(data, cifti.DenseCifti)
    assert data.arr.shape == (np.sum(ref_data != ref_data[0]), )
    testing.assert_equal(data.arr, ref_data[ref_data != ref_data[0]])
    testing.assert_equal(data.brain_model_axis.vertex, np.where(ref_data != ref_data[0])[0])
    assert len(data.brain_model_axis.nvertices) == 1
    assert data.brain_model_axis.nvertices['CIFTI_STRUCTURE_OTHER'] == 642

    cifti.load(op.join(testdir, 'example'))


def test_read_nifti():
    mask = np.random.randint(2, size=(10, 10, 10)) > 0
    values = np.random.randn(10, 10, 10)
    for mask_val in (0, np.nan):
        values[~mask] = mask_val
        affine = np.concatenate((np.random.randn(3, 4), np.array([0, 0, 0, 1])[None, :]), axis=0)
        with tests.testdir():
            nib.Nifti1Image(values, affine).to_filename("masked_image.nii.gz")
            data = cifti.load("masked_image")
            assert isinstance(data, cifti.DenseCifti)
            testing.assert_equal(data.arr, values[mask])
            testing.assert_allclose(data.brain_model_axis.affine, affine)
            assert len(data.brain_model_axis.nvertices) == 0


def check_io(data: cifti.Cifti, extension):
    with tests.testdir():
        data.save("test")
        assert op.isfile(f'test.{extension}.nii')
        loaded = cifti.load("test")
        if data.arr.ndim == 1:
            testing.assert_equal(data.arr, loaded.arr[0])
            assert data.axes == loaded.axes[1:]
        else:
            testing.assert_equal(data.arr, loaded.arr)
            assert data.axes == loaded.axes


def test_io_cifti():
    for cifti_class, cifti_type, main_axis_options in (
            (cifti.DenseCifti, 'd', (volumetric_brain_model(), surface_brain_model(),
                                     volumetric_brain_model() + surface_brain_model())),
            (cifti.ParcelCifti, 'p', (volumetric_parcels(), surface_parcels(),
                                      volumetric_parcels() + surface_parcels())),
    ):
        for main_axis in main_axis_options:
            with tests.testdir():
                data_1d = cifti_class(gen_data([main_axis]), [main_axis])
                check_io(data_1d, f'{cifti_type}scalar')

                connectome = cifti_class(gen_data([main_axis, main_axis]), (main_axis, main_axis))
                check_io(connectome, f'{cifti_type}conn')

                scalar_axis = cifti2_axes.ScalarAxis(['A', 'B', 'C'])
                scalar = cifti_class(gen_data([scalar_axis, main_axis]), (scalar_axis, main_axis))
                check_io(scalar, f'{cifti_type}scalar')

                label_axis = cifti2_axes.LabelAxis(['A', 'B', 'C'], {1: ('some parcel', (1, 0, 0, 1))})
                label = cifti_class(gen_data([label_axis, main_axis]), (label_axis, main_axis))
                check_io(label, f'{cifti_type}label')

                series_axis = cifti2_axes.SeriesAxis(10, 3, 50, unit='HERTZ')
                series = cifti_class(gen_data([series_axis, main_axis]), (series_axis, main_axis))
                check_io(series, f'{cifti_type}tseries')

                if cifti_type == 'd':
                    parcel_axis = surface_parcels()
                    dpconn = cifti_class(gen_data([parcel_axis, main_axis]), (parcel_axis, main_axis))
                    check_io(dpconn, 'dpconn')
                else:
                    dense_axis = surface_brain_model()
                    pdconn = cifti_class(gen_data([dense_axis, main_axis]), (dense_axis, main_axis))
                    check_io(pdconn, 'pdconn')


def test_extract_dense():
    vol_bm  = volumetric_brain_model()
    surf_bm = surface_brain_model()
    for bm in (vol_bm + surf_bm, surf_bm + vol_bm):
        for ndim, no_other_axis in ((1, True), (2, False), (2, True)):
            if ndim == 1:
                data = cifti.DenseCifti(gen_data([bm]), [bm])
            else:
                scl = cifti2_axes.ScalarAxis(['A', 'B', 'C'])
                data = cifti.DenseCifti(gen_data([scl, bm]),
                                        [None if no_other_axis else scl, bm])

            # extract volume
            ref_arr   = data.arr[..., data.brain_model_axis.volume_mask]
            vol_image = data.to_image(fill=np.nan)
            if ndim == 1:
                assert vol_image.shape == data.brain_model_axis.volume_shape
            else:
                assert vol_image.shape == data.brain_model_axis.volume_shape + (3, )
            assert np.isfinite(vol_image.data).sum() == len(vol_bm) * (3 if ndim == 2 else 1)
            testing.assert_equal(vol_image.data[tuple(vol_bm.voxel.T)], ref_arr.T)

            from_image = cifti.DenseCifti.from_image(vol_image)
            assert from_image.brain_model_axis == vol_bm
            testing.assert_equal(from_image.arr, ref_arr)

            # extract surface
            ref_arr = data.arr[..., data.brain_model_axis.surface_mask]
            mask, surf_data = data.surface('cortex', partial=True)
            assert surf_data.shape[-1] < 100
            testing.assert_equal(ref_arr, surf_data)
            testing.assert_equal(surf_bm.vertex, mask)

            surf_data_full = data.surface('cortex', fill=np.nan)
            assert surf_data_full.shape[-1] == 100
            mask_full = np.isfinite(surf_data_full)
            if ndim == 2:
                assert (mask_full.any(0) == mask_full.all(0)).all()
                mask_full = mask_full[0]
            assert mask_full.sum() == len(surf_bm)
            assert mask_full[..., mask].sum() == len(surf_bm)
            testing.assert_equal(surf_data_full[..., mask_full], ref_arr)


def test_extract_parcel():
    vol_parcel,  vol_mask  = volumetric_parcels(return_mask=True)
    surf_parcel, surf_mask = surface_parcels(return_mask=True)
    parcel = vol_parcel + surf_parcel
    for ndim, no_other_axis in ((1, True), (2, False), (2, True)):
        if ndim == 1:
            data = cifti.ParcelCifti(gen_data([parcel]), [parcel])
        else:
            scl = cifti2_axes.ScalarAxis(['A', 'B', 'C'])
            data = cifti.ParcelCifti(gen_data([scl, parcel]),
                                     [None if no_other_axis else scl, parcel])

        # extract volume
        vol_image = data.to_image(fill=np.nan)
        if ndim == 1:
            assert vol_image.shape == data.parcel_axis.volume_shape
        else:
            assert vol_image.shape == data.parcel_axis.volume_shape + (3, )
        assert np.isfinite(vol_image.data).sum() == np.sum(vol_mask != 0) * (3 if ndim == 2 else 1)

        if ndim == 1:
            testing.assert_equal(vol_mask != 0, np.isfinite(vol_image.data))
            for idx in range(1, 5):
                testing.assert_allclose(vol_image.data[vol_mask == idx], data.arr[..., idx - 1])
        else:
            for idx in range(3):
                testing.assert_equal(vol_mask != 0, np.isfinite(vol_image.data[..., idx]))
                for idx2 in range(1, 5):
                    testing.assert_allclose(vol_image.data[vol_mask == idx2, idx], data.arr[idx, idx2 - 1])

        # extract surface
        mask, surf_data = data.surface('cortex', partial=True)
        assert surf_data.shape[-1] == (surf_mask != 0).sum()
        assert (surf_mask[mask] != 0).all()
        print(data.arr)
        for idx in range(1, 5):
            if ndim == 1:
                testing.assert_equal(surf_data.T[surf_mask[mask] == idx], data.arr[idx + 3])
            else:
                for idx2 in range(3):
                    testing.assert_equal(surf_data.T[surf_mask[mask] == idx, idx2], data.arr[idx2, idx + 3])

        surf_data_full = data.surface('cortex', partial=False)
        assert surf_data_full.shape[-1] == 100
        if ndim == 1:
            testing.assert_equal(np.isfinite(surf_data_full), surf_mask != 0)
            for idx in range(1, 5):
                testing.assert_equal(surf_data_full.T[surf_mask == idx], data.arr[idx + 3])
        else:
            for idx2 in range(3):
                testing.assert_equal(np.isfinite(surf_data_full)[idx2], (surf_mask != 0))
                for idx in range(1, 5):
                    testing.assert_equal(surf_data_full.T[surf_mask == idx, idx2], data.arr[idx2, idx + 3])


def test_brainstructure():
    for primary in ['cortex', 'cerebellum']:
        for secondary in [None, 'white', 'pial']:
            for gtype in [None, 'volume', 'surface']:
                for orientation in ['left', 'right', 'both']:
                    bst = cifti.BrainStructure(primary, secondary, orientation, gtype)
                    print(bst.cifti)
                    assert bst.cifti == 'CIFTI_STRUCTURE_%s%s' % (primary.upper(), '' if orientation == 'both' else '_' + orientation.upper())
                    assert bst.gifti['AnatomicalStructurePrimary'][:len(primary)] == primary.capitalize()
                    assert len(bst.gifti) == (1 if secondary is None else 2)
                    if secondary is not None:
                        assert bst.gifti['AnatomicalStructureSecondary'] == secondary.capitalize()
                    assert bst == cifti.BrainStructure(primary, secondary, orientation, gtype)
                    assert bst == bst
                    assert bst != cifti.BrainStructure('Thalamus', secondary, orientation, gtype)
                    if secondary is None:
                        assert bst == cifti.BrainStructure(primary, 'midplane', orientation, gtype)
                    else:
                        assert bst != cifti.BrainStructure(primary, 'midplane', orientation, gtype)
                    if (gtype == 'volume' and primary == 'cortex') or (gtype == 'surface' and primary != 'cortex'):
                        assert cifti.BrainStructure.from_string(bst.cifti) != bst
                    else:
                        assert cifti.BrainStructure.from_string(bst.cifti) == bst
                    assert cifti.BrainStructure.from_string(bst.cifti).secondary is None
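
Taken together, the round-trip pattern these tests exercise can be summarised in a short sketch (using the helper functions defined above; the output file name is hypothetical):

    # Build a dense CIFTI from a random volumetric brain model axis,
    # save it, and load it back; cifti.load infers the object type
    # (DenseCifti here) from the axes stored in the file.
    bm   = volumetric_brain_model()
    data = cifti.DenseCifti(gen_data([bm]), [bm])
    data.save('example')           # writes example.dscalar.nii
    back = cifti.load('example')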
#!/usr/bin/env python

import warnings

import pytest

import fsl.utils.deprecated as deprecated


# these get updated in the relevant functions
WARNING_LINE_NUMBER    = None
DEPRECATED_LINE_NUMBER = None


def _linenum(pattern):
    with open(__file__, 'rt') as f:
        for i, line in enumerate(f.readlines(), 1):
            if pattern in line:
                return i
    return -1


def emit_warning():
    deprecated.warn('blag', vin='1.0.0', rin='2.0.0', msg='yo')
    global WARNING_LINE_NUMBER
    WARNING_LINE_NUMBER = _linenum('deprecated.warn(\'blag\'')


@deprecated.deprecated(vin='1.0.0', rin='2.0.0', msg='yo')
def depfunc():
    pass


def call_dep_func():
    depfunc() # mark
    global DEPRECATED_LINE_NUMBER
    DEPRECATED_LINE_NUMBER = _linenum('depfunc() # mark')


def _check_warning(w, name, lineno):
    assert issubclass(w.category, DeprecationWarning)
    assert '{} is deprecated'.format(name) in str(w.message)
    assert 'test_deprecated.py' in str(w.filename)
    assert w.lineno == lineno


def test_warn():
    deprecated.resetWarningCache()
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        emit_warning()
        assert len(w) == 1
        _check_warning(w[0], 'blag', WARNING_LINE_NUMBER)

    # warning should only be emitted once
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        emit_warning()
        assert len(w) == 0


def test_deprecated():
    deprecated.resetWarningCache()
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        call_dep_func()
        assert len(w) == 1
        _check_warning(w[0], 'depfunc', DEPRECATED_LINE_NUMBER)

    # warning should only be emitted once
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        call_dep_func()
        assert len(w) == 0
#!/usr/bin/env python
#
# These tests require an internet connection, and will only work on linux.
#

import os.path as op
import os
import functools as ft
import subprocess as sp
import tarfile
import zipfile
import random
import string
import binascii
import contextlib
import urllib.request as request
from unittest import mock

import pytest

import fsl.data.dicom as fsldcm
import fsl.utils.tempdir as tempdir

from fsl.tests import mockFSLDIR, touch


datadir = op.join(op.dirname(__file__), 'testdata')


pytestmark = pytest.mark.dicomtest


@contextlib.contextmanager
def install_dcm2niix(version='1.0.20220720'):
    filenames = {
        '1.0.20201102' : 'v1.0.20201102/dcm2niix_lnx.zip',
        '1.0.20190902' : 'v1.0.20190902/dcm2niix_lnx.zip',
        '1.0.20181125' : 'v1.0.20181125/dcm2niix_25-Nov-2018_lnx.zip',
        '1.0.20171017' : 'v1.0.20171017/dcm2niix_18-Oct-2017_lnx.zip',
        '1.0.20220720' : 'v1.0.20220720/dcm2niix_lnx.zip',
    }
    prefix = 'https://github.com/rordenlab/dcm2niix/releases/download/'
    url    = prefix + filenames.get(version, f'v{version}/dcm2niix_lnx.zip')

    with tempdir.tempdir() as td:
        request.urlretrieve(url, 'dcm2niix.zip')

        with zipfile.ZipFile('dcm2niix.zip', 'r') as f:
            f.extractall('.')

        os.chmod(op.join(td, 'dcm2niix'), 0o755)

        path = op.abspath('dcm2niix')

        with mock.patch('fsl.data.dicom.dcm2niix', return_value=path):
            try:
                yield
            finally:
                fsldcm.installedVersion.invalidate()


def test_disabled():
    with mock.patch('fsl.data.dicom.enabled', return_value=False):
        with pytest.raises(RuntimeError):
            fsldcm.scanDir('.')
        with pytest.raises(RuntimeError):
            fsldcm.loadSeries({})


def test_dcm2niix():
    env = os.environ.copy()
    env.pop('FSLDIR', None)

    with tempdir.tempdir() as td:

        env['PATH'] = td

        with mock.patch('os.environ', env):
            assert fsldcm.dcm2niix() == 'dcm2niix'

        bindir   = op.join(td, 'bin')
        dcm2niix = op.join(bindir, 'dcm2niix')
        os.makedirs(bindir)
        touch(dcm2niix)
        os.chmod(dcm2niix, 0o755)

        env['PATH'] = bindir
        with mock.patch('os.environ', env):
            assert fsldcm.dcm2niix() == dcm2niix

    with mockFSLDIR(bin=['dcm2niix']) as fsldir:
        env['FSLDIR'] = fsldir
        dcm2niix      = op.join(fsldir, 'bin', 'dcm2niix')
        with mock.patch('os.environ', env):
            assert fsldcm.dcm2niix() == dcm2niix


def test_installedVersion():
    tests = [
        ('1.0.20190902', (1, 0, 2019, 9,  2)),
        ('1.0.20181125', (1, 0, 2018, 11, 25)),
        ('1.0.20171017', (1, 0, 2017, 10, 17))]

    for version, expect in tests:
        fsldcm.installedVersion.invalidate()
        with install_dcm2niix(version):
            got = fsldcm.installedVersion()
            assert got == expect


def test_enabled():

    try:
        with install_dcm2niix('1.0.20190902'):
            fsldcm.installedVersion.invalidate()
            assert fsldcm.enabled()

        # test dcm2niix not present
        with mock.patch('subprocess.check_output',
                        side_effect=Exception()):
            fsldcm.installedVersion.invalidate()
            assert not fsldcm.enabled()

        # test presence of different versions
        tests = [(b'version v2.1.20191212', True),
                 (b'version v1.0.20190902', True),
                 (b'version v1.0.20171216', True),
                 (b'version v1.0.20171215', True),
                 (b'version v1.0.20171214', False),
                 (b'version v1.0.20160930', False),
                 (b'version v1.0.20160929', False),
                 (b'version v0.0.00000000', False),
                 (b'version blurgh',        False)]

        for verstr, expected in tests:
            fsldcm.installedVersion.invalidate()
            with mock.patch('subprocess.check_output', return_value=verstr):
                assert fsldcm.enabled() == expected

    finally:
        fsldcm.installedVersion.invalidate()


def test_scanDir():

    with install_dcm2niix():

        series = fsldcm.scanDir('.')
        assert len(series) == 0

        datafile = op.join(datadir, 'example_dicom.tbz2')

        with tarfile.open(datafile) as f:
            f.extractall(filter='data')

        series = fsldcm.scanDir('.')
        assert len(series) == 2

        for s in series:
            assert s['PatientName'] in ('MCCARTHY_PAUL',
                                        'MCCARTHY^PAUL',
                                        'MCCARTHY_PAUL_2',
                                        'MCCARTHY^PAUL^2')


def test_seriesCRC():
    RANDOM = object()
    tests  = [
        ({'SeriesInstanceUID' : 'hello-world'},            '2983461467'),
        ({'SeriesInstanceUID' : RANDOM, 'EchoNumber' : 0}, RANDOM),
        ({'SeriesInstanceUID' : RANDOM, 'EchoNumber' : 1}, RANDOM),
        ({'SeriesInstanceUID' : RANDOM, 'EchoNumber' : 2}, RANDOM),
        ({'SeriesInstanceUID' : RANDOM, 'EchoNumber' : 3}, RANDOM),
    ]

    for series, expect in tests:
        series = dict(series)
        if expect is RANDOM:
            expect = ''.join([random.choice(string.ascii_letters + string.digits)
                              for i in range(30)])
            series['SeriesInstanceUID'] = expect
            expect = str(binascii.crc32(expect.encode()))
            echo   = series.get('EchoNumber', 0)
            if echo > 1:
                expect += '.{}'.format(echo)

        assert fsldcm.seriesCRC(series) == expect


def test_loadSeries():

    # test a pre-CRC and a post-CRC version
    for version in ('1.0.20181125', '1.0.20201102'):

        with install_dcm2niix(version):

            datafile = op.join(datadir, 'example_dicom.tbz2')

            with tarfile.open(datafile) as f:
                f.extractall()

            dcmdir   = os.getcwd()
            series   = fsldcm.scanDir(dcmdir)
            expShape = (512, 512, 1)

            for s in series:

                imgs = fsldcm.loadSeries(s)

                for img in imgs:

                    assert img.dicomDir == dcmdir
                    assert img.shape    == expShape
                    assert img[:].shape == expShape
                    assert img.getMeta('PatientName') in ('MCCARTHY_PAUL',
                                                          'MCCARTHY^PAUL',
                                                          'MCCARTHY_PAUL_2',
                                                          'MCCARTHY^PAUL^2')
                    assert 'PatientName' in img.metaKeys()
                    assert 'MCCARTHY_PAUL'   in img.metaValues() or \
                           'MCCARTHY^PAUL'   in img.metaValues() or \
                           'MCCARTHY_PAUL_2' in img.metaValues() or \
                           'MCCARTHY^PAUL^2' in img.metaValues()
                    assert ('PatientName', 'MCCARTHY_PAUL')   in img.metaItems() or \
                           ('PatientName', 'MCCARTHY^PAUL')   in img.metaItems() or \
                           ('PatientName', 'MCCARTHY_PAUL_2') in img.metaItems() or \
                           ('PatientName', 'MCCARTHY^PAUL^2') in img.metaItems()
......@@ -10,7 +10,7 @@ import numpy as np
import pytest
import tests
import fsl.tests as tests
import fsl.data.dtifit as dtifit
import fsl.data.image as fslimage
......@@ -137,6 +137,23 @@ def test_decomposeTensorMatrix():
        assert np.all(np.isclose(resvec,  expvec)) or \
               np.all(np.isclose(resvec, -expvec))

    assert np.allclose(
        dtifit.eigendecompositionToComponents(expV1, expV2, expV3, expL1, expL2, expL3),
        tensorMatrices
    )

    random_tensor = np.random.randn(6)
    assert np.allclose(
        random_tensor,
        dtifit.eigendecompositionToComponents(*dtifit.componentsToEigendecomposition(random_tensor))
    )

    random_tensor = np.random.randn(2, 2, 3, 6)
    assert np.allclose(
        random_tensor,
        dtifit.eigendecompositionToComponents(*dtifit.componentsToEigendecomposition(random_tensor))
    )


def test_DTIFitTensor():

......@@ -152,12 +169,12 @@ def test_DTIFitTensor():
        l2file = op.join(testdir, 'dti_L2.nii')
        l3file = op.join(testdir, 'dti_L3.nii')

        v1 = tests.make_random_image(v1file, (5, 5, 5, 3)).get_data()
        v2 = tests.make_random_image(v2file, (5, 5, 5, 3)).get_data()
        v3 = tests.make_random_image(v3file, (5, 5, 5, 3)).get_data()
        l1 = tests.make_random_image(l1file, (5, 5, 5))   .get_data()
        l2 = tests.make_random_image(l2file, (5, 5, 5))   .get_data()
        l3 = tests.make_random_image(l3file, (5, 5, 5))   .get_data()
        v1 = np.asanyarray(tests.make_random_image(v1file, (5, 5, 5, 3)).dataobj)
        v2 = np.asanyarray(tests.make_random_image(v2file, (5, 5, 5, 3)).dataobj)
        v3 = np.asanyarray(tests.make_random_image(v3file, (5, 5, 5, 3)).dataobj)
        l1 = np.asanyarray(tests.make_random_image(l1file, (5, 5, 5))   .dataobj)
        l2 = np.asanyarray(tests.make_random_image(l2file, (5, 5, 5))   .dataobj)
        l3 = np.asanyarray(tests.make_random_image(l3file, (5, 5, 5))   .dataobj)

        dtiobj = dtifit.DTIFitTensor(testdir)
......@@ -5,6 +5,7 @@
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#

import pathlib

import numpy as np
import nibabel as nib
......@@ -12,7 +13,7 @@ import nibabel as nib
import fsl.utils.tempdir as tempdir
import fsl.utils.ensure as ensure

from . import make_random_image
from fsl.tests import make_random_image


def test_ensureIsImage():
......@@ -22,12 +23,14 @@ def test_ensureIsImage():
        assert ensure.ensureIsImage(img) is img

        loaded = [ensure.ensureIsImage('image.nii'),
                  ensure.ensureIsImage('image')]
        loaded = [ensure.ensureIsImage( 'image.nii'),
                  ensure.ensureIsImage( 'image'),
                  ensure.ensureIsImage(pathlib.Path('image')),
                  ensure.ensureIsImage(pathlib.Path('image.nii'))]

        for l in loaded:
            assert isinstance(l, nib.nifti1.Nifti1Image)
            assert np.all(img.get_data() == l.get_data())
            assert np.all(np.asanyarray(img.dataobj) == np.asanyarray(l.dataobj))

        l      = None
        loaded = None
......@@ -17,7 +17,7 @@ import numpy as np
import pytest
import tests
import fsl.tests as tests
import fsl.data.featanalysis as featanalysis
import fsl.data.featdesign as featdesign
import fsl.data.image as fslimage
......@@ -59,7 +59,7 @@ def test_isFEATDir():
# it's not a feat directory
assert not featanalysis.isFEATDir('nonexistent.feat')
# If any of the above files are not
# If any of the above files are not
# present, it is not a FEAT directory
perms = it.chain(it.combinations(paths, 1),
it.combinations(paths, 2),
......@@ -69,7 +69,7 @@ def test_isFEATDir():
assert not featanalysis.isFEATDir(
op.join(testdir, 'analysis.feat'))
def test_hasStats():
with tests.testdir(['analysis.feat/stats/zstat1.nii.gz']) as testdir:
......@@ -78,7 +78,7 @@ def test_hasStats():
with tests.testdir(['analysis.feat/stats/zstat1.txt']) as testdir:
featdir = op.join(testdir, 'analysis.feat')
assert not featanalysis.hasStats(featdir)
assert not featanalysis.hasStats(featdir)
def test_hasMelodicDir():
......@@ -94,7 +94,7 @@ def test_getAnalysisDir():
'analysis.feat/design.fsf',
'analysis.feat/design.mat',
'analysis.feat/design.con']
testpaths = ['analysis.feat/filtered_func_data.nii.gz',
'analysis.feat/stats/zstat1.nii.gz',
'analysis.feat/logs/feat4_post',
......@@ -106,7 +106,7 @@ def test_getAnalysisDir():
t = op.join(testdir, t)
assert featanalysis.getAnalysisDir(t) == expected
def test_getTopLevelAnalysisDir():
testcases = [
('analysis.feat/filtered_func_data.ica/melodic_IC.nii.gz', 'analysis.feat'),
......@@ -127,7 +127,7 @@ def test_getReportFile():
for paths, expected in testcases:
with tests.testdir(paths) as testdir:
featdir = op.join(testdir, 'analysis.feat')
if expected:
......@@ -145,7 +145,7 @@ def test_loadContrasts():
/ContrastName1 c1
/ContrastName2 c2
/ContrastName3 c3
/NumContrasts 3
/NumContrasts 3
/Matrix
1 0 0
0 1 0
......@@ -188,9 +188,9 @@ def test_loadContrasts():
0 1 1
""",
]
with pytest.raises(Exception):
featanalysis.loadContrasts('no file')
featanalysis.loadContrasts('no file')
with tests.testdir() as testdir:
featdir = op.join(testdir, 'analysis.feat')
......@@ -210,6 +210,77 @@ def test_loadContrasts():
featanalysis.loadContrasts(featdir)
def test_loadFTests():

    goodtests = [
        ("""
         /NumWaves 4
         /NumContrasts 3
         /Matrix
         0 1 0 1
         0 0 1 1
         1 1 1 1
         """,
         [[0, 1, 0, 1],
          [0, 0, 1, 1],
          [1, 1, 1, 1]]),
        ("""
         /NumWaves 10
         /NumContrasts 2
         /Matrix
         0 1 0 1 0 1 1 0 0 1
         0 0 1 1 1 0 0 1 0 0
         """,
         [[0, 1, 0, 1, 0, 1, 1, 0, 0, 1],
          [0, 0, 1, 1, 1, 0, 0, 1, 0, 0]]),
    ]
    badtests = [
        """
        /NumWaves 10
        /NumContrasts 2
        """,
        """
        /NumContrasts 2
        /Matrix
        1 0
        0 1
        """,
        """
        /NumWaves Burgers
        /NumContrasts 2
        /Matrix
        1 0
        0 1
        """,
        """
        /Matrix
        1 0
        0 1
        """,
        """
        /NumWaves 4
        /NumContrasts 3
        /Matrix
        1 0 0 0 1 0 0
        0 1 0 0 1 0 0
        """,
    ]

    with tests.testdir() as testdir:

        featdir = op.join(testdir, 'analysis.feat')

        for contents, expect in goodtests:
            designcon = op.join(featdir, 'design.fts')
            tests.make_dummy_file(designcon, textwrap.dedent(contents).strip())
            assert featanalysis.loadFTests(featdir) == expect

        for contents in badtests:
            designcon = op.join(featdir, 'design.fts')
            tests.make_dummy_file(designcon, textwrap.dedent(contents).strip())
            with pytest.raises(Exception):
                featanalysis.loadFTests(featdir)
def test_loadSettings():
contents = """
......@@ -240,9 +311,10 @@ def test_loadSettings():
    with tests.testdir() as testdir:
        featdir = op.join(testdir, 'analysis.feat')
        tests.make_dummy_file(op.join(featdir, 'design.fsf'), contents)
        result = featanalysis.loadSettings(featdir)
        assert result == expected
        designfsf = op.join(featdir, 'design.fsf')
        tests.make_dummy_file(designfsf, contents)
        assert featanalysis.loadSettings(featdir) == expected
        assert featanalysis.loadFsf(designfsf)    == expected


def test_loadDesign():
......@@ -274,7 +346,7 @@ def test_isFirstLevelAnalysis():
                '2ndlevel_1.gfeat', '2ndlevel_2.gfeat']

    for featdir in featdirs:
        expected = featdir.startswith('1')
        featdir  = op.join(datadir, featdir)
        settings = featanalysis.loadSettings(featdir)
......@@ -288,7 +360,10 @@ def test_loadClusterResults():
                '2ndlevel_1.gfeat/cope1.feat', '2ndlevel_1.gfeat/cope2.feat',
                '2ndlevel_2.gfeat/cope1.feat', '2ndlevel_2.gfeat/cope2.feat']
    ncontrasts = [2, 2, 2, 1, 1, 1, 1]
    nclusters  = [[1, 5], [2, 2], [3, 5], [7], [1], [10], [27]]
    nftests    = [0, 0, 1, 0, 0, 1, 1]

    # nclusters = [contrastclusters] + [ftestclusters]
    nclusters  = [[1, 5], [2, 2], [3, 5, 3], [7], [1], [10, 8], [27, 21]]

    with pytest.raises(Exception):
        featanalysis.loadClusterResults('notafeatdir')
......@@ -300,17 +375,18 @@ def test_loadClusterResults():
with tests.testdir() as testdir:
# For higher level analyses, the
# loadClusterResults function peeks
# at the FEAT input data file
# header, so we have to generate it.
# work from a copy of the test data directory
newfeatdir = op.join(testdir, 'analysis.feat')
shutil.copytree(op.join(datadir, featdir), newfeatdir)
featdir = newfeatdir
# For higher level analyses, the
# loadClusterResults function peeks
# at the FEAT input data file
# header, so we have to generate it.
if not firstlevel:
datafile = op.join(featdir, 'filtered_func_data.nii.gz')
data = np.random.randint(1, 10, (91, 109, 91))
data = np.random.randint(1, 10, (91, 109, 91), dtype=np.int32)
xform = np.array([[-2, 0, 0, 90],
[ 0, 2, 0, -126],
[ 0, 0, 2, -72],
......@@ -318,20 +394,55 @@ def test_loadClusterResults():
fslimage.Image(data, xform=xform).save(datafile)
settings = featanalysis.loadSettings(featdir)
            # contrasts
            for c in range(ncontrasts[i]):
                clusters = featanalysis.loadClusterResults(
                    featdir, settings, c)
                assert len(clusters) == nclusters[i][c]

            # f-tests
            for c in range(nftests[i]):
                clusters = featanalysis.loadClusterResults(
                    featdir, settings, c, ftest=True)
                assert len(clusters) == nclusters[i][c + ncontrasts[i]]

            # Test calling the function on a feat dir
            # which doesn't have any cluster results
            # (2ndlevel_2.gfeat)
            if i == len(featdirs) - 1:
                for clustfile in glob.glob(op.join(featdir, 'cluster*txt')):
                    os.remove(clustfile)
                assert featanalysis.loadClusterResults(
                    featdir, settings, 0) is None

    # The above loop just checks that the number of
    # clusters loaded for each analysis was correct.
    # Below we check that the cluster data was loaded
    # correctly, just for one analysis
    featdir  = op.join(datadir, '1stlevel_1.feat')
    settings = featanalysis.loadSettings(featdir)
    cluster  = featanalysis.loadClusterResults(featdir, settings, 0)[0]
    expected = {'index'    : 1,
                'nvoxels'  : 296,
                'p'        : 1.79e-27,
                'logp'     : 26.7,
                'zmax'     : 6.03,
                'zmaxx'    : 34,
                'zmaxy'    : 10,
                'zmaxz'    : 1,
                'zcogx'    : 31.4,
                'zcogy'    : 12.3,
                'zcogz'    : 1.72,
                'copemax'  : 612,
                'copemaxx' : 34,
                'copemaxy' : 10,
                'copemaxz' : 1,
                'copemean' : 143}

    for k, v in expected.items():
        assert np.isclose(v, getattr(cluster, k))
def test_getDataFile():
paths = ['analysis.feat/filtered_func_data.nii.gz',
......@@ -392,9 +503,9 @@ def test_getResidualFile():
assert featanalysis.getResidualFile(featdir) == expect
else:
with pytest.raises(fslpath.PathError):
featanalysis.getResidualFile(featdir)
featanalysis.getResidualFile(featdir)
def test_getPEFile():
testcases = [
(['analysis.feat/stats/pe1.nii.gz',
......@@ -416,7 +527,7 @@ def test_getPEFile():
assert featanalysis.getPEFile(featdir, pei) == expect
else:
with pytest.raises(fslpath.PathError):
featanalysis.getPEFile(featdir, pei)
featanalysis.getPEFile(featdir, pei)
def test_getCOPEFile():
......@@ -440,8 +551,32 @@ def test_getCOPEFile():
assert featanalysis.getCOPEFile(featdir, ci) == expect
else:
with pytest.raises(fslpath.PathError):
featanalysis.getCOPEFile(featdir, ci)
featanalysis.getCOPEFile(featdir, ci)
def test_getZStatFile():
    testcases = [
        (['analysis.feat/stats/zstat1.nii.gz',
          'analysis.feat/stats/zstat2.nii.gz'], True),
        (['analysis.feat/stats/zstat1.nii.gz'], True),
        (['analysis.feat/stats/zstat0.nii.gz'], False),
        (['analysis.feat/stats/zstat1.txt'],    False),
    ]

    for paths, shouldPass in testcases:
        with tests.testdir(paths) as testdir:
            featdir = op.join(testdir, 'analysis.feat')

            for zi in range(len(paths)):
                expect = op.join(
                    featdir, 'stats', 'zstat{}.nii.gz'.format(zi + 1))

                if shouldPass:
                    assert featanalysis.getZStatFile(featdir, zi) == expect
                else:
                    with pytest.raises(fslpath.PathError):
                        featanalysis.getZStatFile(featdir, zi)


def test_getZStatFile():
    testcases = [
......@@ -464,8 +599,31 @@ def test_getZStatFile():
assert featanalysis.getZStatFile(featdir, zi) == expect
else:
with pytest.raises(fslpath.PathError):
featanalysis.getZStatFile(featdir, zi)
featanalysis.getZStatFile(featdir, zi)
def test_getZFStatFile():
    testcases = [
        (['analysis.feat/stats/zfstat1.nii.gz',
          'analysis.feat/stats/zfstat2.nii.gz'], True),
        (['analysis.feat/stats/zfstat1.nii.gz'], True),
        (['analysis.feat/stats/zfstat0.nii.gz'], False),
        (['analysis.feat/stats/zfstat1.txt'],    False),
    ]

    for paths, shouldPass in testcases:
        with tests.testdir(paths) as testdir:
            featdir = op.join(testdir, 'analysis.feat')

            for zi in range(len(paths)):
                expect = op.join(
                    featdir, 'stats', 'zfstat{}.nii.gz'.format(zi + 1))

                if shouldPass:
                    assert featanalysis.getZFStatFile(featdir, zi) == expect
                else:
                    with pytest.raises(fslpath.PathError):
                        featanalysis.getZFStatFile(featdir, zi)
def test_getClusterMaskFile():
testcases = [
......@@ -488,4 +646,28 @@ def test_getClusterMaskFile():
assert featanalysis.getClusterMaskFile(featdir, ci) == expect
else:
with pytest.raises(fslpath.PathError):
featanalysis.getClusterMaskFile(featdir, ci)
featanalysis.getClusterMaskFile(featdir, ci)
def test_getFClusterMaskFile():
    testcases = [
        (['analysis.feat/cluster_mask_zfstat1.nii.gz',
          'analysis.feat/cluster_mask_zfstat2.nii.gz'], True),
        (['analysis.feat/cluster_mask_zfstat1.nii.gz'], True),
        (['analysis.feat/cluster_mask_zfstat0.nii.gz'], False),
        (['analysis.feat/cluster_mask_zfstat1.txt'],    False),
    ]

    for paths, shouldPass in testcases:
        with tests.testdir(paths) as testdir:
            featdir = op.join(testdir, 'analysis.feat')

            for ci in range(len(paths)):
                expect = op.join(
                    featdir, 'cluster_mask_zfstat{}.nii.gz'.format(ci + 1))

                if shouldPass:
                    assert featanalysis.getFClusterMaskFile(featdir, ci) == expect
                else:
                    with pytest.raises(fslpath.PathError):
                        featanalysis.getFClusterMaskFile(featdir, ci)
......@@ -103,14 +103,16 @@ with the following commands:
"""
import os.path as op
import numpy as np
import os.path as op
import textwrap as tw
import numpy as np
import pytest
import tests
import fsl.data.featdesign as featdesign
import fsl.data.featanalysis as featanalysis
import fsl.tests as tests
from fsl.utils.tempdir import tempdir
import fsl.data.featdesign as featdesign
import fsl.data.featanalysis as featanalysis
datadir = op.join(op.dirname(__file__), 'testdata', 'test_feat')
......@@ -397,3 +399,72 @@ def test_loadDesignMat():
with pytest.raises(Exception):
featdesign.loadDesignMat(badfile)
# fsl/fslpy!469
def test_loadFEATDesignFile():
with tempdir():
with open('design1.con', 'wt') as f:
f.write(tw.dedent("""
/ContrastName1 mycontrast
/NumWaves 2
/NumContrasts 1
/Matrix
10 20
""").strip())
with open('design2.con', 'wt') as f:
f.write(tw.dedent("""
/ContrastName1
/NumWaves 2
/NumContrasts 1
/Matrix
10 20
""").strip())
des1 = featanalysis.loadFEATDesignFile('design1.con')
exp1 = {'ContrastName1': 'mycontrast',
'NumWaves': '2',
'NumContrasts': '1',
'Matrix': '10 20'}
des2 = featanalysis.loadFEATDesignFile('design2.con')
exp2 = {'ContrastName1': '',
'NumWaves': '2',
'NumContrasts': '1',
'Matrix': '10 20'}
assert des1 == exp1
assert des2 == exp2
def test_VoxelwiseEVs():
with tempdir():
img = tests.make_random_image('image.nii.gz', (10, 10, 10, 10))
ev1 = featdesign.VoxelwiseEV( 0, 0, 'ev1', 'image.nii.gz')
ev2 = featdesign.VoxelwiseConfoundEV(0, 0, 'ev2', 'image.nii.gz')
for xyz in tests.random_voxels((10, 10, 10), 10):
x, y, z = map(int, xyz)
exp = img.dataobj[x, y, z, :]
assert np.all(ev1.image[x, y, z, :] == exp)
assert np.all(ev2.image[x, y, z, :] == exp)
def test_compressed_voxelwise_ev():
testcases = [((1, 1, 10, 10), (0, 0, 5)),
((1, 10, 1, 10), (0, 5, 0)),
((10, 1, 1, 10), (5, 0, 0))]
with tempdir():
for shape, vox in testcases:
img = tests.make_random_image('vev.nii.gz', shape)
vev = featdesign.VoxelwiseEV(0, 0, 'ev1', 'vev.nii.gz')
x, y, z = vox
assert np.all(vev.getData(5, 5, 5) == img.dataobj[x, y, z, :])
......@@ -16,7 +16,7 @@ import numpy as np
import pytest
import tests
import fsl.tests as tests
import fsl.data.featimage as featimage
import fsl.data.featdesign as featdesign
import fsl.data.featanalysis as featanalysis
......@@ -88,7 +88,8 @@ def test_FEATImage_attributes():
copes=False,
zstats=False,
residuals=False,
clustMasks=False)
clustMasks=False,
zfstats=False)
else:
featdir = op.join(datadir, featdir)
......@@ -100,6 +101,7 @@ def test_FEATImage_attributes():
design = featdesign.FEATFSFDesign(featdir, settings)
desmat = design.getDesign()
evnames = [ev.title for ev in design.getEVs()]
ftests = featanalysis.loadFTests(featdir)
contrastnames, contrasts = featanalysis.loadContrasts(featdir)
assert np.all(np.isclose(fi.shape, shape))
......@@ -115,8 +117,10 @@ def test_FEATImage_attributes():
assert fi.numEVs() == desmat.shape[1]
assert fi.evNames() == evnames
assert fi.numContrasts() == len(contrasts)
assert fi.numFTests() == len(ftests)
assert fi.contrastNames() == contrastnames
assert fi.contrasts() == contrasts
assert fi.ftests() == ftests
assert np.all(np.isclose(fi.getDesign(), desmat))
assert fi.thresholds() == featanalysis.getThresholds(settings)
......@@ -138,7 +142,7 @@ def test_FEATImage_imageAccessors():
shape = TEST_ANALYSES[featdir]['shape']
xform = TEST_ANALYSES[featdir]['xform']
with tests.testdir() as testdir:
if 'realdata' not in featdir:
......@@ -153,9 +157,10 @@ def test_FEATImage_imageAccessors():
shape4D = shape
shape = shape4D[:3]
fi = featimage.FEATImage(featdir)
nevs = fi.numEVs()
ncons = fi.numContrasts()
fi = featimage.FEATImage(featdir)
nevs = fi.numEVs()
ncons = fi.numContrasts()
nftests = fi.numFTests()
# Testing the FEATImage internal cache
for i in range(2):
......@@ -166,6 +171,9 @@ def test_FEATImage_imageAccessors():
assert fi.getCOPE( con).shape == shape
assert fi.getZStats( con).shape == shape
assert fi.getClusterMask(con).shape == shape
for ft in range(nftests):
assert fi.getZFStats( ft).shape == shape
assert fi.getFClusterMask(ft).shape == shape
del fi
fi = None
parent
    [opt_layer_{opt}]
        sub_file.nii.gz
        opt_file_{opt}.nii.gz (opt_file)
\ No newline at end of file