Showing 3740 additions and 1311 deletions
@@ -36,26 +36,28 @@ load an atlas image, which will be one of the following atlas-specific

   :nosignatures:

   LabelAtlas
+  StatisticAtlas
   ProbabilisticAtlas
"""

from __future__ import division

import xml.etree.ElementTree as et
import os.path               as op
import glob
import bisect
import logging

import numpy as np

import fsl.data.image     as fslimage
import fsl.data.constants as constants
-from fsl.utils.platform import platform as platform
-import fsl.utils.transform as transform
+from fsl.utils.platform import platform
+import fsl.utils.image.resample as resample
+import fsl.transform.affine     as affine
import fsl.utils.notifier as notifier
import fsl.utils.settings as fslsettings

log = logging.getLogger(__name__)
@@ -321,9 +323,9 @@ class AtlasLabel(object):
    ========= ================================================================
    ``name``  Region name
    ``index`` The index of this label into the list of all labels in the
-             ``AtlasDescription`` that owns it. For probabilistic atlases,
-             this is also the index into the 4D atlas image of the volume
-             that corresponds to this region.
+             ``AtlasDescription`` that owns it. For statistic/probabilistic
+             atlases, this is also the index into the 4D atlas image of the
+             volume that corresponds to this region.
    ``value`` For label atlases and summary images, the value of voxels that
              are in this region.
    ``x``     X coordinate of the region in world space
@@ -365,8 +367,17 @@ class AtlasLabel(object):
        """
        return self.index < other.index

+    def __repr__(self):
+        """
+        Represents AtlasLabel as string
+        """
+        return '{}({}, index={}, value={})'.format(
+            self.__class__.__name__, self.name,
+            self.index, self.value,
+        )
-class AtlasDescription(object):
+class AtlasDescription:
    """An ``AtlasDescription`` instance parses and stores the information
    stored in the FSL XML file that describes a single FSL atlas. An XML
    atlas specification file is assumed to have a structure that looks like
@@ -376,8 +387,13 @@ class AtlasDescription(object):
       <atlas>
         <header>
           <name></name>            # Atlas name
-          <type></type>            # 'Probabilistic' or 'Label'
+          <type></type>            # 'Statistic', 'Probabilistic' or 'Label'
+          <statistic></statistic>  # Optional. Type of statistic
+          <units></units>          # Optional. Units of measurement
+          <precision></precision>  # Optional. Decimal precision to report
+          <upper></upper>          # Optional. Upper threshold
+          <lower></lower>          # Optional. Lower threshold
          <images>
           <imagefile>
           </imagefile>             # If type is Probabilistic, path
@@ -402,11 +418,12 @@ class AtlasDescription(object):
         </header>
         <data>
-         # index - For probabilistic atlases, index of corresponding volume in
-         #         4D image file. For label images, the value of voxels which
-         #         are in the corresponding region. For probabilistic atlases,
-         #         it is assumed that the value for each region in the summary
-         #         image(s) are equal to ``index + 1``.
+         # index - For statistic/probabilistic atlases, index of corresponding
+         #         volume in 4D image file. For label images, the value of
+         #         voxels which are in the corresponding region. For
+         #         statistic/probabilistic atlases, it is assumed that the
+         #         value for each region in the summary image(s) are equal to
+         #         ``index + 1``.
          #
          #
          # x    |
@@ -442,7 +459,18 @@ class AtlasDescription(object):
    ``specPath``   Path to the atlas XML specification file.
-   ``atlasType``  Atlas type - either *probabilistic* or *label*.
+   ``atlasType``  Atlas type - either *statistic*, *probabilistic* or
+                  *label*.
+   ``statistic``  Type of statistic, for statistic atlases.
+   ``units``      Unit of measurement, for statistic atlases.
+   ``precision``  Reporting precision, for statistic atlases.
+   ``upper``      Upper threshold, for statistic atlases.
+   ``lower``      Lower threshold, for statistic atlases.
    ``images``     A list of images available for this atlas - usually
                   :math:`1mm^3` and :math:`2mm^3` images are present.
@@ -490,6 +518,29 @@ class AtlasDescription(object):
        # ('probabalistic' is a typo found in some atlas spec files)
        if self.atlasType == 'probabalistic':
            self.atlasType = 'probabilistic'

+        if self.atlasType == 'statistic':
+            fields = ['statistic', 'units', 'lower', 'upper', 'precision']
+            values = {}
+            for field in fields:
+                elem = header.find(field)
+                if elem is not None and elem.text is not None:
+                    values[field] = elem.text.strip()
+
+            self.statistic = values.get('statistic', '')
+            self.units     = values.get('units',     '')
+            self.lower     = float(values.get('lower',     0))
+            self.upper     = float(values.get('upper',     100))
+            self.precision = int(  values.get('precision', 2))
+
+        elif self.atlasType == 'probabilistic':
+            self.statistic = ''
+            self.units     = '%'
+            self.lower     = 5
+            self.upper     = 100
+            self.precision = 0

        images             = header.findall('images')
        self.images        = []
        self.summaryImages = []
@@ -509,7 +560,7 @@ class AtlasDescription(object):
            imagefile        = op.normpath(atlasDir + imagefile)
            summaryimagefile = op.normpath(atlasDir + summaryimagefile)

-            i = fslimage.Image(imagefile, loadData=False, calcRange=False)
+            i = fslimage.Image(imagefile)

            self.images       .append(imagefile)
            self.summaryImages.append(summaryimagefile)
@@ -562,7 +613,7 @@ class AtlasDescription(object):
        # Load the appropriate transformation matrix
        # and transform all those voxel coordinates
        # into world coordinates
-        coords = transform.transform(coords, self.xforms[0])
+        coords = affine.transform(coords, self.xforms[0])

        # Update the coordinates
        # in our label objects
@@ -573,11 +624,11 @@ class AtlasDescription(object):
        self.labels = list(sorted(self.labels))
-    def find(self, index=None, value=None):
-        """Find an :class:`.AtlasLabel` either by ``index``, or by ``value``.
+    def find(self, index=None, value=None, name=None):
+        """Find an :class:`.AtlasLabel` by ``index``, ``value``, or ``name``.

-        Exactly one of ``index`` or ``value`` may be specified - a
-        ``ValueError`` is raised otherwise. If an invalid ``index`` or
-        ``value`` is specified, an ``IndexError`` or ``KeyError`` will be
-        raised.
+        Exactly one of ``index``, ``value``, or ``name`` may be specified - a
+        ``ValueError`` is raised otherwise. If an invalid ``index``, ``name``,
+        or ``value`` is specified, an ``IndexError`` or ``KeyError`` will be
+        raised.
@@ -585,12 +636,25 @@ class AtlasDescription(object):
                  labels, and a 3D ``LabelAtlas`` may have more values
                  than labels.
        """
-        if (index is     None and value is     None) or \
-           (index is not None and value is not None):
-            raise ValueError('Only one of index or value may be specified')
-
-        if index is not None: return self.labels[         index]
-        else:                 return self.__labelsByValue[int(value)]
+        if ((index is not None) + (value is not None) + (name is not None)) != 1:
+            raise ValueError('Only one of index, value, or name may be specified')
+        if   index is not None: return self.labels[         index]
+        elif value is not None: return self.__labelsByValue[int(value)]
+        else:
+            matches = [l for l in self.labels if l.name == name]
+            if len(matches) == 0:
+                # look for partial matches only if there are no full matches
+                matches = [l for l in self.labels if l.name[:len(name)] == name]
+            if len(matches) == 0:
+                raise IndexError('No match for {} found in labels {}'.format(
+                    name, tuple(l.name for l in self.labels)))
+            elif len(matches) > 1:
+                raise IndexError('Multiple matches for {} found in labels {}'.format(
+                    name, tuple(l.name for l in self.labels)))
+            return matches[0]
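# A brief usage sketch of the three lookup modes (``desc`` is assumed to be
# a loaded ``AtlasDescription``; the label name below is hypothetical):
#
#     lbl = desc.find(index=0)              # by position in desc.labels
#     lbl = desc.find(value=1)              # by voxel value / volume index
#     lbl = desc.find(name='Frontal Pole')  # by exact, or unambiguous
#                                           # partial, region name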
    def __eq__(self, other):
@@ -611,6 +675,12 @@ class AtlasDescription(object):
        """
        return self.name.lower() < other.name.lower()

+    def __repr__(self):
+        """
+        String representation of AtlasDescription
+        """
+        return '{}({})'.format(self.__class__.__name__, self.atlasID)
class Atlas(fslimage.Image):
    """This is the base class for the :class:`LabelAtlas` and
@@ -632,7 +702,7 @@ class Atlas(fslimage.Image):
        :arg resolution: Desired isotropic resolution in millimetres.

        :arg isLabel:    Pass in ``True`` for label atlases, ``False`` for
-                         probabilistic atlases.
+                         statistic/probabilistic atlases.

        All other arguments are passed to :meth:`.Image.__init__`.
        """
@@ -679,7 +749,7 @@ class Atlas(fslimage.Image):
        """Makes sure that the given mask has the same resolution as this
        atlas, so it can be used for querying. Used by the
        :meth:`.LabelAtlas.maskLabels` and
-        :meth:`.ProbabilisticAtlas.maskProportions` methods.
+        :meth:`.StatisticAtlas.maskValues` methods.

        :arg mask: A :class:`.Image`
@@ -695,9 +765,8 @@ class Atlas(fslimage.Image):
            # for resampling, as it is most likely
            # that the mask is binary.
            try:
-                mask, xform = mask.resample(self.shape[:3],
-                                            dtype=np.float32,
-                                            order=0)
+                mask, xform = resample.resample(
+                    mask, self.shape[:3], dtype=np.float32, order=0)
            except ValueError:
                raise MaskError('Mask has wrong number of dimensions')
@@ -710,13 +779,11 @@ class Atlas(fslimage.Image):
        return mask


class MaskError(Exception):
    """Exception raised by the :meth:`LabelAtlas.maskLabel` and
-    :meth:`ProbabilisticAtlas.maskProportions` when a mask is provided which
+    :meth:`StatisticAtlas.maskValues` when a mask is provided which
    does not match the atlas space.
    """
-    pass
class LabelAtlas(Atlas):
@@ -782,7 +849,7 @@ class LabelAtlas(Atlas):
        """
        if not voxel:
-            loc = transform.transform([loc], self.worldToVoxMat)[0]
+            loc = affine.transform([loc], self.worldToVoxMat)[0]
            loc = [int(v) for v in loc.round()]

        if loc[0] < 0 or \
@@ -813,10 +880,17 @@ class LabelAtlas(Atlas):
                  of each present value. The proportions are returned as
                  values between 0 and 100.

+        .. note:: Calling this method will cause the atlas image data to be
+                  loaded into memory.
+
        .. note:: Use the :meth:`find` method to retrieve the ``AtlasLabel``
                  associated with each returned value.
        """
+        # Mask-based indexing requires the image
+        # data to be loaded into memory
+        self.data

        # Extract the values that are in
        # the mask, and their corresponding
        # mask weights
@@ -850,15 +924,48 @@ class LabelAtlas(Atlas):
        return values, props
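# For example (a sketch, assuming ``atlas`` is a loaded ``LabelAtlas`` and
# ``mask`` is a 3D ``Image`` in a compatible space), the values returned by
# the mask variant of :meth:`label` (``maskLabel`` above) can be paired with
# their labels via ``find``:
#
#     values, props = atlas.maskLabel(mask)
#     for val, prop in zip(values, props):
#         lbl = atlas.desc.find(value=int(val))
#         print('{}: {:.1f}%'.format(lbl.name, prop))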
+    def get(self, label=None, index=None, value=None, name=None, binary=True):
+        """Returns the binary image for the given label.
+
+        Only one of the arguments should be used to define the label.
+
+        :arg label:  :class:`AtlasLabel` contained within this atlas
+        :arg index:  index of the label
+        :arg value:  value of the label
+        :arg name:   name of the label
+        :arg binary: If ``True`` (the default), the image will contain 1s in
+                     the label region. Otherwise the image will contain the
+                     label value.
+        :return:     :class:`.Image` with the mask
+        """
+        if ((label is not None) + (index is not None) +
+            (value is not None) + (name  is not None)) != 1:
+            raise ValueError('Only one of label, index, value, or name '
+                             'may be specified')
+
+        if label is None:
+            label = self.find(index=index, name=name, value=value)
+        elif label not in self.desc.labels:
+            raise ValueError('Unknown label provided')
+
+        arr = (self.data == label.value).astype(np.int32)
+        if not binary:
+            arr[arr > 0] = label.value
+
+        return fslimage.Image(arr, name=label.name, header=self.header)
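# A usage sketch for ``get`` (``atlas`` is assumed to be a loaded
# ``LabelAtlas``; the region name and value are hypothetical):
#
#     img = atlas.get(name='Caudate')           # binary mask (1s inside region)
#     img = atlas.get(value=5, binary=False)    # voxels keep the label value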
-class ProbabilisticAtlas(Atlas):
-    """A 4D atlas which contains one volume for each region.
-
-    The ``ProbabilisticAtlas`` provides the :meth:`proportions` method,
-    which makes looking up region probabilities easy.
-    """
+class StatisticAtlas(Atlas):
+    """A ``StatisticAtlas`` is a 4D image which contains one volume for
+    each region in the atlas; each volume contains some statistic value
+    for the corresponding region.
+
+    The :class:`ProbabilisticAtlas` is a specialisation of the
+    ``StatisticAtlas``.
+    """
    def __init__(self, atlasDesc, resolution=None, **kwargs):
-        """Create a ``ProbabilisticAtlas`` instance.
+        """Create a ``StatisticAtlas`` instance.

        :arg atlasDesc:  The :class:`AtlasDescription` instance describing
                         the atlas.
@@ -868,36 +975,59 @@ class ProbabilisticAtlas(Atlas):
        Atlas.__init__(self, atlasDesc, resolution, False, **kwargs)
-    def proportions(self, location, *args, **kwargs):
-        """Looks up and returns the proportions of all regions at the given
-        location.
+    def get(self, label=None, index=None, value=None, name=None):
+        """Returns the statistic image at the given label.
+
+        Only one of the arguments should be used to define the label.
+
+        :arg label: :class:`AtlasLabel` contained within this atlas
+        :arg index: index of the label
+        :arg value: value of the label
+        :arg name:  name of the label
+        :return:    :class:`.Image` with the statistic values for the
+                    specified label.
+        """
+        if ((label is not None) + (index is not None) +
+            (value is not None) + (name  is not None)) != 1:
+            raise ValueError('Only one of label, index, value, or name '
+                             'may be specified')
+
+        if label is None:
+            label = self.find(index=index, value=value, name=name)
+        elif label not in self.desc.labels:
+            raise ValueError('Unknown label provided')
+
+        arr = self[..., label.index]
+        return fslimage.Image(arr, name=label.name, header=self.header)
+
+    def values(self, location, *args, **kwargs):
+        """Looks up and returns the values of all regions at the given
+        location.

        :arg location: Can be one of the following:

                         - A sequence of three values, interpreted as atlas
-                           coordinates. In this case, :meth:`coordProportions`
+                           coordinates. In this case, :meth:`coordValues`
                           is called.

                         - An :class:`.Image` which is interpreted as a
-                           weighted mask. In this case, :meth:`maskProportions`
+                           weighted mask. In this case, :meth:`maskValues`
                           is called.

-        All other arguments are passed through to the :meth:`coordProportions`
-        or :meth:`maskProportions` methods.
+        All other arguments are passed through to the :meth:`coordValues`
+        or :meth:`maskValues` methods.

-        :returns: The return value of either :meth:`coordProportions` or
-                  :meth:`maskProportions`.
+        :returns: The return value of either :meth:`coordValues` or
+                  :meth:`maskValues`.
        """
        if isinstance(location, fslimage.Image):
-            return self.maskProportions(location, *args, **kwargs)
+            return self.maskValues(location, *args, **kwargs)
        else:
-            return self.coordProportions(location, *args, **kwargs)
+            return self.coordValues(location, *args, **kwargs)
-    def coordProportions(self, loc, voxel=False):
-        """Looks up the region probabilities for the given location.
+    def coordValues(self, loc, voxel=False):
+        """Looks up the region values for the given location.

        :arg loc:   A sequence of three values, interpreted as atlas
                    world or voxel coordinates.
@@ -905,14 +1035,12 @@ class ProbabilisticAtlas(Atlas):
        :arg voxel: Defaults to ``False``. If ``True``, the ``loc``
                    argument is interpreted as voxel coordinates.

-        :returns: a list of values, one per region, which represent
-                  the probability of each region for the specified
-                  location. Returns an empty list if the given
-                  location is out of bounds.
+        :returns: a list of values, one per region. Returns an empty
+                  list if the given location is out of bounds.
        """
        if not voxel:
-            loc = transform.transform([loc], self.worldToVoxMat)[0]
+            loc = affine.transform([loc], self.worldToVoxMat)[0]
            loc = [int(v) for v in loc.round()]

        if loc[0] < 0 or \
@@ -923,30 +1051,27 @@ class ProbabilisticAtlas(Atlas):
           loc[2] >= self.shape[2]:
            return []

-        props = self[loc[0], loc[1], loc[2], :]
+        vals = self[loc[0], loc[1], loc[2], :]

        # We only return labels for this atlas -
        # the underlying image may have more
        # volumes than this atlas has labels.
-        return [props[l.index] for l in self.desc.labels]
+        return [vals[l.index] for l in self.desc.labels]
-    def maskProportions(self, mask):
-        """Looks up the probabilities of all regions in the given ``mask``.
+    def maskValues(self, mask):
+        """Looks up the average values of all regions in the given ``mask``.

        :arg mask: A 3D :class:`.Image` which is interpreted as a weighted
                   mask. If the ``mask`` shape does not match that of this
-                  ``ProbabilisticAtlas``, it is resampled using
-                  :meth:`.Image.resample`, with nearest-neighbour
-                  interpolation.
+                  ``StatisticAtlas``, it is resampled using
+                  :meth:`Atlas.prepareMask`.

-        :returns: A sequence containing the proportion, within the mask,
-                  of all regions in the atlas. The proportions are returned
-                  as values between 0 and 100.
+        :returns: A sequence containing the average value, within the mask,
+                  of all regions in the atlas.
        """
-        props    = []
+        avgvals  = []
        mask     = self.prepareMask(mask)
        boolmask = mask > 0
        weights  = mask[boolmask]
@@ -959,11 +1084,17 @@ class ProbabilisticAtlas(Atlas):
            vals = self[..., label.index]
            vals = vals[boolmask] * weights
-            prop = vals.sum() / weightsum
-
-            props.append(prop)
-
-        return props
+            val  = vals.sum() / weightsum
+            avgvals.append(val)
+
+        return avgvals


+class ProbabilisticAtlas(StatisticAtlas):
+    """A 4D atlas which contains one volume for each region. Each volume
+    contains probability values for one region, between 0 and 100.
+    """
registry = AtlasRegistry()
...
#!/usr/bin/env python
#
# bitmap.py - The Bitmap class
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
"""This module contains the :class:`Bitmap` class, for loading bitmap image
files. Pillow is required to use the ``Bitmap`` class.
"""
import os.path as op
import pathlib
import logging
import numpy as np
import fsl.data.image as fslimage
log = logging.getLogger(__name__)
BITMAP_EXTENSIONS = ['.bmp', '.png', '.jpg', '.jpeg',
'.tif', '.tiff', '.gif', '.rgba',
'.jp2', '.jpg2', '.jp2k']
"""File extensions we understand. """
BITMAP_DESCRIPTIONS = [
'Bitmap',
'Portable Network Graphics',
'JPEG',
'JPEG',
'TIFF',
'TIFF',
'Graphics Interchange Format',
'Raw RGBA',
'JPEG 2000',
'JPEG 2000',
'JPEG 2000']
"""A description for each :attr:`BITMAP_EXTENSION`. """
class Bitmap(object):
"""The ``Bitmap`` class can be used to load a bitmap image. The
:meth:`asImage` method will convert the bitmap into an :class:`.Image`
instance.
"""
def __init__(self, bmp):
"""Create a ``Bitmap``.
:arg bmp: File name of an image, or a ``numpy`` array containing image
data.
"""
if isinstance(bmp, (pathlib.Path, str)):
try:
# Allow big/truncated images
import PIL.Image as Image
import PIL.ImageFile as ImageFile
Image .MAX_IMAGE_PIXELS = None
ImageFile.LOAD_TRUNCATED_IMAGES = True
except ImportError:
raise RuntimeError('Install Pillow to use the Bitmap class')
src = str(bmp)
img = Image.open(src)
# If this is a palette/LUT
# image, convert it into a
# regular rgb(a) image.
if img.mode == 'P':
img = img.convert()
data = np.array(img)
elif isinstance(bmp, np.ndarray):
src = 'array'
data = np.copy(bmp)
else:
raise ValueError('unknown bitmap: {}'.format(bmp))
# Make the array (w, h, c). Single channel
# (e.g. greyscale) images are returned as
# 2D arrays, whereas multi-channel images
# are returned as 3D. In either case, the
# first two dimensions are (height, width),
# but we want them the other way around.
data = np.atleast_3d(data)
data = np.fliplr(data.transpose((1, 0, 2)))
data = np.array(data, dtype=np.uint8, order='C')
w, h = data.shape[:2]
self.__data = data
self.__dataSource = src
self.__name = op.basename(src)
def __hash__(self):
"""Returns a number which uniquely idenfities this ``Bitmap`` instance
(the result of ``id(self)``).
"""
return id(self)
def __str__(self):
"""Return a string representation of this ``Bitmap`` instance."""
return '{}({}, {})'.format(self.__class__.__name__,
self.dataSource,
self.shape)
def __repr__(self):
"""See the :meth:`__str__` method. """
return self.__str__()
@property
def name(self):
"""Returns the name of this ``Bitmap``, typically the base name of the
file.
"""
return self.__name
@property
def dataSource(self):
"""Returns the bitmap data source - typically the file name. """
return self.__dataSource
@property
def data(self):
"""Convenience method which returns the bitmap data as a ``(w, h, c)``
array, where ``c`` is either 3 or 4.
"""
return self.__data
@property
def shape(self):
"""Returns the bitmap shape - ``(width, height, nchannels)``. """
return self.__data.shape
def asImage(self):
"""Convert this ``Bitmap`` into an :class:`.Image` instance. """
width, height, nchannels = self.shape
if nchannels == 1:
dtype = np.uint8
elif nchannels == 3:
dtype = np.dtype([('R', 'uint8'),
('G', 'uint8'),
('B', 'uint8')])
elif nchannels == 4:
dtype = np.dtype([('R', 'uint8'),
('G', 'uint8'),
('B', 'uint8'),
('A', 'uint8')])
else:
raise ValueError('Cannot convert bitmap with {} '
'channels into nifti image'.format(nchannels))
if nchannels == 1:
data = self.data.reshape((width, height))
else:
data = np.zeros((width, height), dtype=dtype)
for ci, ch in enumerate(dtype.names):
data[ch] = self.data[..., ci]
data = np.asarray(data, order='F')
return fslimage.Image(data,
name=self.name,
dataSource=self.dataSource)
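# A brief usage sketch (the file name is hypothetical, Pillow must be
# installed, and this module is assumed to be importable as
# ``fsl.data.bitmap``):
#
#     from fsl.data.bitmap import Bitmap
#
#     bmp = Bitmap('photo.png')
#     print(bmp.shape)      # (width, height, nchannels)
#     img = bmp.asImage()   # fsl.data.image.Image with an RGB(A) dtype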
"""
Provides a sparse representation of volumetric and/or surface data
The data can be either defined per voxel/vertex (:class:`DenseCifti`) or per parcel (`class:`ParcelCifti`).
The data can be read from NIFTI, GIFTI, or CIFTI files.
Non-sparse volumetric or surface representations can be extracte.
"""
from nibabel.cifti2 import cifti2_axes
from typing import Sequence, Optional, Union
import numpy as np
from fsl.data import image
import nibabel as nib
from fsl.utils.path import addExt
dense_extensions = {
cifti2_axes.BrainModelAxis: '.dconn.nii',
cifti2_axes.ParcelsAxis: '.dpconn.nii',
cifti2_axes.SeriesAxis: '.dtseries.nii',
cifti2_axes.ScalarAxis: '.dscalar.nii',
cifti2_axes.LabelAxis: '.dlabel.nii',
}
parcel_extensions = {
cifti2_axes.BrainModelAxis: '.pdconn.nii',
cifti2_axes.ParcelsAxis: '.pconn.nii',
cifti2_axes.SeriesAxis: '.ptseries.nii',
cifti2_axes.ScalarAxis: '.pscalar.nii',
cifti2_axes.LabelAxis: '.plabel.nii',
}
class Cifti:
"""
Parent class for the two types of CIFTI files.
The type of the CIFTI file is determined by the last axis, which can be one of:
- :py:class:`BrainModelAxis <cifti2_axes.BrainModelAxis>`
- :py:class:`ParcelsAxis <cifti2_axes.ParcelsAxis>`
"""
def __init__(self, arr: np.ndarray, axes: Sequence[Optional[cifti2_axes.Axis]]):
"""
Defines a new dataset in greyordinate space
:param arr:  (..., N) array for N greyordinates or parcels
:param axes: sequence of CIFTI axes describing the data along each
             dimension; can contain Nones for undefined axes
"""
self.arr = arr
axes = tuple(axes)
while self.arr.ndim > len(axes):
axes = (None, ) + axes
self.axes = axes
if not all(ax is None or len(ax) == sz for ax, sz in zip(axes, self.arr.shape)):
raise ValueError(f"Shape of axes {tuple(-1 if ax is None else len(ax) for ax in axes)} does not "
f"match shape of array {self.arr.shape}")
def to_cifti(self, default_axis=None):
"""
Create a CIFTI image from the data
:param default_axis: What to use as an axis along any undefined dimensions
- By default an error is raised
- if set to "scalar" a ScalarAxis is used with names of "default {index}"
- if set to "series" a SeriesAxis is used
:return: nibabel CIFTI image
"""
if any(ax is None for ax in self.axes):
if default_axis is None:
raise ValueError("Can not store to CIFTI without defining what is stored along each dimension")
elif default_axis == 'scalar':
def get_axis(n: int):
return cifti2_axes.ScalarAxis([f'default {idx + 1}' for idx in range(n)])
elif default_axis == 'series':
def get_axis(n: int):
return cifti2_axes.SeriesAxis(0, 1, n)
else:
raise ValueError(f"default_axis should be set to None, 'scalar', or 'series', not {default_axis}")
new_axes = [
get_axis(sz) if ax is None else ax
for ax, sz in zip(self.axes, self.arr.shape)
]
else:
new_axes = list(self.axes)
data = self.arr
if data.ndim == 1:
# CIFTI axes are always at least 2D
data = data[None, :]
new_axes.insert(0, cifti2_axes.ScalarAxis(['default']))
return nib.Cifti2Image(data, header=new_axes)
@classmethod
def from_cifti(cls, filename, writable=False):
"""
Creates new greyordinate object from dense CIFTI file
:param filename: CIFTI filename or :class:`nib.Cifti2Image` object
:param writable: if True, opens data array in writable mode
"""
if isinstance(filename, str):
img = nib.load(filename)
else:
img = filename
if not isinstance(img, nib.Cifti2Image):
raise ValueError(f"Input {filename} should be CIFTI filename or nibabel Cifti2Image")
if writable:
data = np.memmap(filename, img.dataobj.dtype, mode='r+',
offset=img.dataobj.offset, shape=img.shape, order='F')
else:
data = np.asanyarray(img.dataobj)
axes = [img.header.get_axis(idx) for idx in range(data.ndim)]
if isinstance(axes[-1], cifti2_axes.BrainModelAxis):
return DenseCifti(data, axes)
elif isinstance(axes[-1], cifti2_axes.ParcelsAxis):
return ParcelCifti(data, axes)
raise ValueError("Last axis of CIFTI object should be a BrainModelAxis or ParcelsAxis")
def save(self, cifti_filename, default_axis=None):
"""
Writes this sparse representation to/from a filename
:param cifti_filename: output filename
:param default_axis: What to use as an axis along any undefined dimensions
- By default an error is raised
- if set to "scalar" a ScalarAxis is used with names of "default {index}"
- if set to "series" a SeriesAxis is used
:return:
"""
self.to_cifti(default_axis).to_filename(addExt(cifti_filename, defaultExt=self.extension, mustExist=False))
@classmethod
def from_gifti(cls, filename, mask_values=(0, np.nan)):
"""
Creates a new greyordinate object from a GIFTI file
:param filename: GIFTI filename
:param mask_values: values to mask out
:return: greyordinate object representing the unmasked vertices
"""
if isinstance(filename, str):
img = nib.load(filename)
else:
img = filename
datasets = [darr.data for darr in img.darrays]
if len(datasets) == 1:
data = datasets[0]
else:
data = np.concatenate(
[np.atleast_2d(d) for d in datasets], axis=0
)
mask = np.ones(data.shape, dtype='bool')
for value in mask_values:
if value is np.nan:
mask &= ~np.isnan(data)
else:
mask &= ~(data == value)
while mask.ndim > 1:
mask = mask.any(0)
anatomy = BrainStructure.from_gifti(img)
bm_axes = cifti2_axes.BrainModelAxis.from_mask(mask, name=anatomy.cifti)
return DenseCifti(data[..., mask], [bm_axes])
@classmethod
def from_image(cls, input, mask_values=(np.nan, 0)):
"""
Creates a new greyordinate object from a NIFTI file
:param input: FSL :class:`image.Image` object
:param mask_values: which values to mask out
:return: greyordinate object representing the unmasked voxels
"""
img = image.Image(input)
mask = np.ones(img.data.shape, dtype='bool')
for value in mask_values:
if value is np.nan:
mask &= ~np.isnan(img.data)
else:
mask &= ~(img.data == value)
while mask.ndim > 3:
mask = mask.any(-1)
if np.sum(mask) == 0:
raise ValueError("No unmasked voxels found in NIFTI image")
inverted_data = np.transpose(img.data[mask], tuple(range(1, img.data.ndim - 2)) + (0, ))
bm_axes = cifti2_axes.BrainModelAxis.from_mask(mask, affine=img.nibImage.affine)
return DenseCifti(inverted_data, [bm_axes])
class DenseCifti(Cifti):
"""
Represents sparse data defined for a subset of voxels and vertices (i.e., greyordinates)
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not isinstance(self.brain_model_axis, cifti2_axes.BrainModelAxis):
raise ValueError(f"DenseCifti expects a BrainModelAxis as last axes object, not {type(self.brain_model_axis)}")
@property
def brain_model_axis(self, ) -> cifti2_axes.BrainModelAxis:
return self.axes[-1]
@property
def extension(self, ):
if self.arr.ndim == 1:
return dense_extensions[cifti2_axes.ScalarAxis]
return dense_extensions[type(self.axes[-2])]
def to_image(self, fill=0) -> image.Image:
"""
Get the volumetric data as an :class:`image.Image`
"""
if self.brain_model_axis.volume_mask.sum() == 0:
raise ValueError(f"Can not create volume without voxels in {self}")
data = np.full(self.brain_model_axis.volume_shape + self.arr.shape[:-1], fill,
dtype=self.arr.dtype)
voxels = self.brain_model_axis.voxel[self.brain_model_axis.volume_mask]
data[tuple(voxels.T)] = np.transpose(self.arr, (-1,) + tuple(range(self.arr.ndim - 1)))[
self.brain_model_axis.volume_mask]
return image.Image(data, xform=self.brain_model_axis.affine)
def surface(self, anatomy, fill=np.nan, partial=False):
"""
Gets a specific surface
If `partial` is True a view of the data rather than a copy is returned.
:param anatomy: BrainStructure or string like 'CortexLeft' or 'CortexRight'
:param fill: which value to fill the array with if not all vertices are defined
:param partial: only return the part of the surface defined in the greyordinate file (ignores `fill` if set)
:return:
- if not partial: (..., n_vertices) array
- if partial: tuple with (N, ) int array with indices on the surface included in (..., N) array
"""
if isinstance(anatomy, str):
anatomy = BrainStructure.from_string(anatomy, issurface=True)
if anatomy.cifti not in self.brain_model_axis.name:
raise ValueError(f"No surface data for {anatomy.cifti} found")
slc, bm = None, None
arr = np.full(self.arr.shape[:-1] + (self.brain_model_axis.nvertices[anatomy.cifti],), fill,
dtype=self.arr.dtype)
for name, slc_try, bm_try in self.brain_model_axis.iter_structures():
if name == anatomy.cifti:
if partial:
if bm is not None:
raise ValueError(f"Surface {anatomy} does not form a contiguous block")
slc, bm = slc_try, bm_try
else:
arr[..., bm_try.vertex] = self.arr[..., slc_try]
if not partial:
return arr
else:
return bm.vertex, self.arr[..., slc]
class ParcelCifti(Cifti):
"""
Represents sparse data defined at specific parcels
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not isinstance(self.parcel_axis, cifti2_axes.ParcelsAxis):
raise ValueError(f"ParcelCifti expects a ParcelsAxis as last axes object, not {type(self.parcel_axis)}")
@property
def extension(self, ):
if self.arr.ndim == 1:
return parcel_extensions[cifti2_axes.ScalarAxis]
return parcel_extensions[type(self.axes[-2])]
@property
def parcel_axis(self, ) -> cifti2_axes.ParcelsAxis:
return self.axes[-1]
def to_image(self, fill=0):
"""
Get the volumetric data as an :class:`Image`
"""
data = np.full(self.parcel_axis.volume_shape + self.arr.shape[:-1], fill, dtype=self.arr.dtype)
written = np.zeros(self.parcel_axis.volume_shape, dtype='bool')
for idx, write_to in enumerate(self.parcel_axis.voxels):
if written[tuple(write_to.T)].any():
raise ValueError("Duplicate voxels in different parcels")
data[tuple(write_to.T)] = self.arr[np.newaxis, ..., idx]
written[tuple(write_to.T)] = True
if not written.any():
raise ValueError("Parcellation does not contain any volumetric data")
return image.Image(data, xform=self.parcel_axis.affine)
def surface(self, anatomy, fill=np.nan, partial=False):
"""
Gets a specific surface
:param anatomy: BrainStructure or string like 'CortexLeft' or 'CortexRight'
:param fill: which value to fill the array with if not all vertices are defined
:param partial: only return the part of the surface defined in the greyordinate file (ignores `fill` if set)
:return:
- if not partial: (..., n_vertices) array
- if partial: tuple with (N, ) int array with indices on the surface included in (..., N) array
"""
if isinstance(anatomy, str):
anatomy = BrainStructure.from_string(anatomy, issurface=True)
if anatomy.cifti not in self.parcel_axis.nvertices:
raise ValueError(f"No surface data for {anatomy.cifti} found")
arr = np.full(self.arr.shape[:-1] + (self.parcel_axis.nvertices[anatomy.cifti],), fill,
dtype=self.arr.dtype)
written = np.zeros(self.parcel_axis.nvertices[anatomy.cifti], dtype='bool')
for idx, vertices in enumerate(self.parcel_axis.vertices):
if anatomy.cifti not in vertices:
continue
write_to = vertices[anatomy.cifti]
if written[write_to].any():
raise ValueError("Duplicate vertices in different parcels")
arr[..., write_to] = self.arr[..., idx, np.newaxis]
written[write_to] = True
if not partial:
return arr
else:
return np.where(written)[0], arr[..., written]
class BrainStructure(object):
"""Which brain structure does the parent object describe?
Supports how brain structures are stored in both GIFTI and CIFTI files
"""
def __init__(self, primary, secondary=None, hemisphere='both', geometry=None):
"""Creates a new brain structure
:param primary: Name of the brain structure (e.g. cortex, thalamus)
:param secondary: Further specification of which part of the brain structure is described (e.g. 'white' or
'pial' for the cortex)
:param hemisphere: which hemisphere is the brain structure in ('left', 'right', or 'both')
:param geometry: does the parent object describe the 'volume' or the 'surface'
"""
self.primary = primary.lower()
self.secondary = None if secondary is None else secondary.lower()
self.hemisphere = hemisphere.lower()
if geometry not in (None, 'surface', 'volume'):
raise ValueError(f"Invalid value for geometry: {geometry}")
self.geometry = geometry
def __eq__(self, other):
"""Two brain structures are equal if they could describe the same structure
"""
if isinstance(other, str):
other = self.from_string(other)
match_primary = (self.primary == other.primary or self.primary == 'all' or other.primary == 'all' or
self.primary == other.geometry or self.geometry == other.primary)
match_hemisphere = self.hemisphere == other.hemisphere
match_secondary = (self.secondary is None or other.secondary is None or self.secondary == other.secondary)
match_geometry = (self.geometry is None or other.geometry is None or self.geometry == other.geometry)
return match_primary and match_hemisphere and match_secondary and match_geometry
@property
def gifti(self, ):
"""Returns the keywords needed to define the surface in the meta information of a GIFTI file
"""
main = self.primary.capitalize() + ('' if self.hemisphere == 'both' else self.hemisphere.capitalize())
res = {'AnatomicalStructurePrimary': main}
if self.secondary is not None:
res['AnatomicalStructureSecondary'] = self.secondary.capitalize()
return res
def __str__(self, ):
"""Returns a short description of the brain structure
"""
if self.secondary is None:
return self.primary.capitalize() + self.hemisphere.capitalize()
else:
return "%s%s(%s)" % (self.primary.capitalize(), self.hemisphere.capitalize(), self.secondary)
@property
def cifti(self, ):
"""Returns a description of the brain structure needed to define the surface in a CIFTI file
"""
return 'CIFTI_STRUCTURE_' + self.primary.upper() + ('' if self.hemisphere == 'both' else ('_' + self.hemisphere.upper()))
@classmethod
def from_string(cls, value, issurface=None):
"""Parses a string to find out which brain structure is being described
:param value: string to be parsed
:param issurface: defines whether the object describes the volume or surface of the brain structure (default: surface if the brain structure is the cortex volume otherwise)
"""
if '_' in value:
items = [val.lower() for val in value.split('_')]
if items[-1] in ['left', 'right', 'both']:
hemisphere = items[-1]
others = items[:-1]
elif items[0] in ['left', 'right', 'both']:
hemisphere = items[0]
others = items[1:]
else:
hemisphere = 'both'
others = items
if others[0] in ['nifti', 'cifti', 'gifti']:
others = others[2:]
primary = '_'.join(others)
else:
low = value.lower()
if 'left' == low[-4:]:
hemisphere = 'left'
primary = low[:-4]
elif 'right' == low[-5:]:
hemisphere = 'right'
primary = low[:-5]
elif 'both' == low[-4:]:
hemisphere = 'both'
primary = low[:-4]
else:
hemisphere = 'both'
primary = low
if issurface is None:
issurface = primary == 'cortex'
if primary == '':
primary = 'all'
return cls(primary, None, hemisphere, 'surface' if issurface else 'volume')
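# For example, the parser accepts both GIFTI-style and CIFTI-style names
# (a sketch):
#
#     bs = BrainStructure.from_string('CortexLeft')
#     print(bs.primary, bs.hemisphere, bs.geometry)   # cortex left surface
#
#     bs = BrainStructure.from_string('CIFTI_STRUCTURE_THALAMUS_RIGHT')
#     print(bs.primary, bs.hemisphere, bs.geometry)   # thalamus right volume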
@classmethod
def from_gifti(cls, gifti_obj):
"""
Extracts the brain structure from a GIFTI object
"""
primary_str = 'AnatomicalStructurePrimary'
secondary_str = 'AnatomicalStructureSecondary'
primary = "other"
secondary = None
for obj in [gifti_obj] + gifti_obj.darrays:
if primary_str in obj.meta:
primary = obj.meta[primary_str]
if secondary_str in obj.meta:
secondary = obj.meta[secondary_str]
anatomy = cls.from_string(primary, issurface=True)
anatomy.secondary = None if secondary is None else secondary.lower()
return anatomy
def load(filename, mask_values=(0, np.nan), writable=False) -> Union[DenseCifti, ParcelCifti]:
"""
Reads CIFTI data from the given file
File can be:
- NIFTI file
- GIFTI file
- CIFTI file
:param filename: input filename
:param mask_values: which values are outside of the mask for NIFTI or GIFTI input
:param writable: allow to write to disk
:return: appropriate CIFTI sub-class (parcellated or dense)
"""
possible_extensions = (
tuple(dense_extensions.values()) +
tuple(parcel_extensions.values()) +
tuple(image.ALLOWED_EXTENSIONS) +
('.shape.gii', '.gii')
)
if isinstance(filename, str):
filename = addExt(filename, possible_extensions, fileGroups=image.FILE_GROUPS)
img = nib.load(filename)
else:
img = filename
if isinstance(img, nib.Cifti2Image):
return Cifti.from_cifti(img, writable=writable)
if isinstance(img, nib.GiftiImage):
if writable:
raise ValueError("Can not open GIFTI file in writable mode")
return Cifti.from_gifti(img, mask_values)
try:
vol_img = image.Image(img)
except ValueError:
raise ValueError(f"I do not know how to convert {type(img)} into greyordinates (from {filename})")
if writable:
raise ValueError("Can not open NIFTI file in writable mode")
return Cifti.from_image(vol_img, mask_values)
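# A usage sketch (the file names are hypothetical, the file is assumed to
# contain left-cortex data, and this module is assumed to be importable as
# ``fsl.data.cifti``):
#
#     from fsl.data import cifti
#
#     c     = cifti.load('func.dtseries.nii')   # DenseCifti for this file
#     img   = c.to_image()                      # volumetric part as an Image
#     verts = c.surface('CortexLeft')           # (..., n_vertices) array
#     c.save('func_copy')                       # suitable extension added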
@@ -30,6 +30,7 @@ specification:
   NIFTI_XFORM_ALIGNED_ANAT
   NIFTI_XFORM_TALAIRACH
   NIFTI_XFORM_MNI_152
+  NIFTI_XFORM_TEMPLATE_OTHER
"""
@@ -81,7 +82,14 @@ NIFTI_XFORM_MNI_152 = 4
"""MNI 152 normalized coordinates."""

-NIFTI_XFORM_ANALYZE = 5
+NIFTI_XFORM_TEMPLATE_OTHER = 5
+"""Coordinates aligned to some template that is not MNI152 or Talairach.
+See https://www.nitrc.org/forum/message.php?msg_id=26394 for details.
+"""
+
+NIFTI_XFORM_ANALYZE = 6
"""Code which indicates that this is an ANALYZE image, not a NIFTI image. """
@@ -98,6 +106,36 @@ NIFTI_UNITS_PPM = 40
NIFTI_UNITS_RADS = 48
# NIFTI datatype codes
NIFTI_DT_NONE = 0
NIFTI_DT_UNKNOWN = 0
NIFTI_DT_BINARY = 1
NIFTI_DT_UNSIGNED_CHAR = 2
NIFTI_DT_SIGNED_SHORT = 4
NIFTI_DT_SIGNED_INT = 8
NIFTI_DT_FLOAT = 16
NIFTI_DT_COMPLEX = 32
NIFTI_DT_DOUBLE = 64
NIFTI_DT_RGB = 128
NIFTI_DT_ALL = 255
NIFTI_DT_UINT8 = 2
NIFTI_DT_INT16 = 4
NIFTI_DT_INT32 = 8
NIFTI_DT_FLOAT32 = 16
NIFTI_DT_COMPLEX64 = 32
NIFTI_DT_FLOAT64 = 64
NIFTI_DT_RGB24 = 128
NIFTI_DT_INT8 = 256
NIFTI_DT_UINT16 = 512
NIFTI_DT_UINT32 = 768
NIFTI_DT_INT64 = 1024
NIFTI_DT_UINT64 = 1280
NIFTI_DT_FLOAT128 = 1536
NIFTI_DT_COMPLEX128 = 1792
NIFTI_DT_COMPLEX256 = 2048
NIFTI_DT_RGBA32 = 2304
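# These datatype codes follow the NIFTI-1 standard. A minimal lookup sketch
# (the mapping below is illustrative, not part of this module):
#
#     import numpy as np
#
#     NP_TO_NIFTI_DT = {
#         np.dtype(np.uint8)   : NIFTI_DT_UINT8,
#         np.dtype(np.int16)   : NIFTI_DT_INT16,
#         np.dtype(np.int32)   : NIFTI_DT_INT32,
#         np.dtype(np.float32) : NIFTI_DT_FLOAT32,
#         np.dtype(np.float64) : NIFTI_DT_FLOAT64,
#     }
#
#     assert NP_TO_NIFTI_DT[np.dtype(np.float32)] == NIFTI_DT_FLOAT32 == 16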
# NIFTI file intent codes
NIFTI_INTENT_NONE = 0
NIFTI_INTENT_CORREL = 2
...
@@ -29,22 +29,56 @@ import os
import os.path    as op
import subprocess as sp
import re
+import sys
import glob
import json
+import shlex
+import shutil
import logging
+import binascii
+
+import numpy as np
import nibabel as nib

import fsl.utils.tempdir  as tempdir
import fsl.utils.memoize  as memoize
+import fsl.utils.platform as fslplatform
import fsl.data.image     as fslimage


log = logging.getLogger(__name__)


MIN_DCM2NIIX_VERSION = (1, 0, 2017, 12, 15)
-"""Minimum version of dcm2niix that is required for this module to work. """
+"""Minimum version of ``dcm2niix`` that is required for this module to work.
+"""
+
+
+CRC_DCM2NIIX_VERSION = (1, 0, 2019, 9, 2)
+"""For versions of ``dcm2niix`` equal to or newer than this version, the
+``-n`` flag, used to convert a single DICOM series, requires that a CRC
+checksum identifying the series be passed (see the :func:`seriesCRC`
+function). Versions prior to this require the series number to be passed.
+"""
+
+
+def dcm2niix() -> str:
+    """Tries to find an absolute path to the ``dcm2niix`` command. Returns
+    ``'dcm2niix'`` (unqualified) if a specific executable cannot be found.
+    """
+    fsldir     = fslplatform.platform.fsldir
+    candidates = [
+        shutil.which('dcm2niix')
+    ]
+
+    if fsldir is not None:
+        candidates.insert(0, op.join(fsldir, 'bin', 'dcm2niix'))
+
+    for c in candidates:
+        if c is not None and op.exists(c):
+            return c
+
+    return 'dcm2niix'
class DicomImage(fslimage.Image):
@@ -80,12 +114,19 @@ class DicomImage(fslimage.Image):

@memoize.memoize
-def enabled():
-    """Returns ``True`` if ``dcm2niix`` is present, and recent enough,
-    ``False`` otherwise.
+def installedVersion():
+    """Return a tuple describing the version of ``dcm2niix`` that is
+    installed, or ``None`` if dcm2niix cannot be found, or its version
+    cannot be parsed. The returned tuple contains the following fields,
+    all integers:
+
+      - Major version number
+      - Minor version number
+      - Year
+      - Month
+      - Day
    """
-    cmd            = 'dcm2niix -h'
+    cmd            = f'{dcm2niix()} -h'
    versionPattern = re.compile(r'v'
                                r'(?P<major>[0-9]+)\.'
                                r'(?P<minor>[0-9]+)\.'
@@ -102,80 +143,124 @@ def enabled():
            match = re.match(versionPattern, word)
-            if match is None:
-                continue
-
-            installedVersion = (
-                int(match.group('major')),
-                int(match.group('minor')),
-                int(match.group('year')),
-                int(match.group('month')),
-                int(match.group('day')))
-
-            # make sure installed version
-            # is equal to or newer than
-            # minimum required version
-            for iv, mv in zip(installedVersion, MIN_DCM2NIIX_VERSION):
-                if   iv > mv: return True
-                elif iv < mv: return False
-
-            # if we get here, versions are equal
-            return True
-
-    except Exception as e:
-        log.debug('Error parsing dcm2niix version string: {}'.format(e))
-        return False
+            if match is not None:
+                return (int(match.group('major')),
+                        int(match.group('minor')),
+                        int(match.group('year')),
+                        int(match.group('month')),
+                        int(match.group('day')))
+
+    except Exception as e:
+        log.debug(f'Error parsing dcm2niix version string: {e}')
+
+    return None


+def compareVersions(v1, v2):
+    """Compares two ``dcm2niix`` versions ``v1`` and ``v2``. The versions
+    are assumed to be in the format returned by :func:`installedVersion`.
+
+    :returns: -  1 if ``v1`` is newer than ``v2``
+              - -1 if ``v1`` is older than ``v2``
+              -  0 if ``v1`` is the same as ``v2``.
+    """
+    for iv1, iv2 in zip(v1, v2):
+        if   iv1 > iv2: return  1
+        elif iv1 < iv2: return -1
+    return 0


+def enabled():
+    """Returns ``True`` if ``dcm2niix`` is present, and recent enough,
+    ``False`` otherwise.
+    """
+    installed = installedVersion()
+    required  = MIN_DCM2NIIX_VERSION
+    return ((installed is not None) and
+            (compareVersions(installed, required) >= 0))
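# For example (a sketch):
#
#     if enabled():
#         version = installedVersion()   # e.g. (1, 0, 2019, 9, 2)
#         if compareVersions(version, CRC_DCM2NIIX_VERSION) >= 0:
#             print('dcm2niix supports CRC-based series selection')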
def scanDir(dcmdir):
-    """Uses ``dcm2niix`` to scan the given DICOM directory, and returns a
-    list of dictionaries, one for each data series that was identified.
-    Each dictionary is populated with some basic metadata about the series.
+    """Uses the ``dcm2niix -b o`` option to generate a BIDS sidecar JSON
+    file for each series in the given DICOM directory. Reads them all in,
+    and returns them as a sequence of dicts.
+
+    Some additional metadata is added to each dictionary:
+      - ``DicomDir``: The absolute path to ``dcmdir``

-    :arg dcmdir: Directory containing DICOM files.
+    :arg dcmdir: Directory containing DICOM series

-    :returns: A list of dictionaries, each containing metadata about
-              one DICOM data series.
+    :returns: A list of dicts, each containing the BIDS sidecar JSON
+              metadata for one DICOM series.
    """

    if not enabled():
        raise RuntimeError('dcm2niix is not available or is too old')

-    dcmdir      = op.abspath(dcmdir)
-    cmd         = 'dcm2niix -b o -ba n -f %s -o . {}'.format(dcmdir)
-    snumPattern = re.compile('^[0-9]+')
+    dcmdir = op.abspath(dcmdir)
+    cmd    = f'{dcm2niix()} -b o -ba n -f %s -o . "{dcmdir}"'
+    series = []

    with tempdir.tempdir() as td:

        with open(os.devnull, 'wb') as devnull:
-            sp.call(cmd.split(), stdout=devnull, stderr=devnull)
+            sp.call(shlex.split(cmd), stdout=devnull, stderr=devnull)

        files = glob.glob(op.join(td, '*.json'))

        if len(files) == 0:
            return []

-        # sort numerically by series number if possible
-        try:
-            def sortkey(f):
-                match = re.match(snumPattern, f)
-                snum  = int(match.group(0))
-                return snum
-            files = sorted(files, key=sortkey)
-        except Exception:
-            files = sorted(files)
-
-        series = []
        for fn in files:
            with open(fn, 'rt') as f:
                meta = json.load(f)
                meta['DicomDir'] = dcmdir
+                # SeriesDescription is not
+                # guaranteed to be present
+                if 'SeriesDescription' not in meta:
+                    meta['SeriesDescription'] = meta['SeriesNumber']
                series.append(meta)

-        return series
+    # sort by series number
+    def key(s):
+        return s.get('SeriesNumber', sys.maxsize)
+    series = list(sorted(series, key=key))
+
+    return series
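# A usage sketch (the directory is hypothetical):
#
#     series = scanDir('/data/dicom/subject01')
#     for s in series:
#         print(s.get('SeriesNumber'), s.get('SeriesDescription'))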
def seriesCRC(series):
"""Calculate a checksum string of the given DICOM series.
The returned string is of the form::
SeriesCRC[.echonumber]
Where ``SeriesCRC`` is an unsigned integer which is the CRC32
checksum of the ``SeriesInstanceUID``, and ``echonumber`` is
the ``EchoNumber`` of the series - this is only present for
multi-echo data, where the series is from the second or subsequent
echos.
:arg series: Dict containing BIDS metadata about a DICOM series,
as returned by :func:`scanDir`.
:returns: String containing a CRC32 checksum for the series.
"""
uid = series.get('SeriesInstanceUID', None)
echo = series.get('EchoNumber', None)
if uid is None:
return None
crc32 = str(binascii.crc32(uid.encode()))
if echo is not None and echo > 1:
crc32 = f'{crc32}.{echo}'
return crc32
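# The checksum is a plain CRC32 of the UID string; for example (the UID is
# illustrative):
#
#     import binascii
#
#     uid = '1.2.840.113619.2.336.4120'       # hypothetical SeriesInstanceUID
#     print(str(binascii.crc32(uid.encode())))  # value seriesCRC would return
#                                               # (no echo suffix)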
def loadSeries(series):
@@ -192,20 +277,39 @@ def loadSeries(series):

    if not enabled():
        raise RuntimeError('dcm2niix is not available or is too old')

-    dcmdir = series['DicomDir']
-    snum   = series['SeriesNumber']
-    desc   = series['SeriesDescription']
-    cmd    = 'dcm2niix -b n -f %s -z n -o . -n {} {}'.format(snum, dcmdir)
+    dcmdir  = series['DicomDir']
+    snum    = series['SeriesNumber']
+    desc    = series['SeriesDescription']
+    version = installedVersion()
+
+    # Newer versions of dcm2niix
+    # require a CRC to identify
+    # series
+    if compareVersions(version, CRC_DCM2NIIX_VERSION) >= 0:
+        ident = seriesCRC(series)
+
+    # Older versions require
+    # the series number
+    else:
+        ident = snum
+
+    cmd = f'{dcm2niix()} -b n -f %s -z n -o . -n "{ident}" "{dcmdir}"'

    with tempdir.tempdir() as td:

        with open(os.devnull, 'wb') as devnull:
-            sp.call(cmd.split(), stdout=devnull, stderr=devnull)
+            sp.call(shlex.split(cmd), stdout=devnull, stderr=devnull)

-        files  = glob.glob(op.join(td, '{}*.nii'.format(snum)))
-        images = [nib.load(f) for f in files]
-
-        # Force-load images into memory
-        [i.get_data() for i in images]
+        files  = glob.glob(op.join(td, f'{snum}*.nii'))
+        images = [nib.load(f, mmap=False) for f in files]
+
+        # copy images so nibabel no longer
+        # refs the files (as they will be
+        # deleted), and force-load the image
+        # data into memory (to avoid any
+        # disk accesses due to e.g. memmap)
+        images = [nib.Nifti1Image(np.asanyarray(i.dataobj), None, i.header)
+                  for i in images]

        return [DicomImage(i, series, dcmdir, name=desc) for i in images]
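# Typical usage ties ``scanDir`` and ``loadSeries`` together (a sketch
# continuing the example above; the directory is hypothetical):
#
#     series = scanDir('/data/dicom/subject01')
#     images = loadSeries(series[0])    # list of DicomImage objects
#     print(images[0].shape)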
@@ -3,11 +3,19 @@
# dtifit.py - The DTIFitTensor class, and some related utility functions.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
+# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
"""This module provides the :class:`.DTIFitTensor` class, which encapsulates
the diffusion tensor data generated by the FSL ``dtifit`` tool.

-The following utility functions are also defined:
+There are also conversion tools between diffusion tensors defined in three
+formats:
+
+  * (..., 3, 3) array with the full diffusion tensor
+  * (..., 6) array with the unique components (Dxx, Dxy, Dxz, Dyy, Dyz, Dzz)
+  * Tuple with the eigenvectors and eigenvalues (V1, V2, V3, L1, L2, L3)
+
+Finally, the following utility functions are also defined:

.. autosummary::
   :nosignatures:
@@ -33,6 +41,123 @@ from . import image as fslimage

log = logging.getLogger(__name__)
def eigendecompositionToTensor(V1, V2, V3, L1, L2, L3):
"""
Converts the eigenvalues/eigenvectors into a 3x3 diffusion tensor
:param V1: (..., 3) shaped array with the first eigenvector
:param V2: (..., 3) shaped array with the second eigenvector
:param V3: (..., 3) shaped array with the third eigenvector
:param L1: (..., ) shaped array with the first eigenvalue
:param L2: (..., ) shaped array with the second eigenvalue
:param L3: (..., ) shaped array with the third eigenvalue
:return: (..., 3, 3) array with the diffusion tensor
"""
check_shape = L1.shape
for eigen_value in (L2, L3):
if eigen_value.shape != check_shape:
raise ValueError("Not all eigenvalues have the same shape")
for eigen_vector in (V1, V2, V3):
if eigen_vector.shape != check_shape + (3, ):
raise ValueError("Not all eigenvectors have the same shape as the eigenvalues")
return (
L1[..., None, None] * V1[..., None, :] * V1[..., :, None] +
L2[..., None, None] * V2[..., None, :] * V2[..., :, None] +
L3[..., None, None] * V3[..., None, :] * V3[..., :, None]
)
def tensorToEigendecomposition(matrices):
"""
Decomposes the 3x3 diffusion tensor into eigenvalues and eigenvectors
:param matrices: (..., 3, 3) array-like with diffusion tensor
:return: Tuple containing the eigenvectors and eigenvalues (V1, V2, V3, L1, L2, L3)
"""
matrices = np.asanyarray(matrices)
if matrices.shape[-2:] != (3, 3):
raise ValueError("Expected 3x3 diffusion tensors")
shape = matrices.shape[:-2]
nvoxels = np.prod(shape)
# Calculate the eigenvectors and
# values on all of those matrices
flat_matrices = matrices.reshape((-1, 3, 3))
vals, vecs = npla.eigh(flat_matrices)
vecShape = shape + (3, )
l1 = vals[:, 2] .reshape(shape)
l2 = vals[:, 1] .reshape(shape)
l3 = vals[:, 0] .reshape(shape)
v1 = vecs[:, :, 2].reshape(vecShape)
v2 = vecs[:, :, 1].reshape(vecShape)
v3 = vecs[:, :, 0].reshape(vecShape)
return v1, v2, v3, l1, l2, l3
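# A quick round-trip check of the two conversions above (a sketch using
# random symmetric positive-definite matrices):
#
#     import numpy as np
#
#     rng = np.random.default_rng(0)
#     a   = rng.normal(size=(10, 3, 3))
#     dt  = a @ a.transpose(0, 2, 1)    # symmetric positive-definite tensors
#
#     v1, v2, v3, l1, l2, l3 = tensorToEigendecomposition(dt)
#     assert np.allclose(eigendecompositionToTensor(v1, v2, v3, l1, l2, l3), dt)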
def tensorToComponents(matrices):
"""
Extracts the 6 unique components from a 3x3 diffusion tensor
:param matrices: (..., 3, 3) array-like with diffusion tensors
:return: (..., 6) array with the unique components sorted like Dxx, Dxy, Dxz, Dyy, Dyz, Dzz
"""
matrices = np.asanyarray(matrices)
if matrices.shape[-2:] != (3, 3):
raise ValueError("Expected 3x3 diffusion tensors")
return np.stack([
matrices[..., 0, 0],
matrices[..., 0, 1],
matrices[..., 0, 2],
matrices[..., 1, 1],
matrices[..., 1, 2],
matrices[..., 2, 2],
], -1)
def componentsToTensor(components):
"""
Creates 3x3 diffusion tensors from the 6 unique components
:param components: (..., 6) array-like with Dxx, Dxy, Dxz, Dyy, Dyz, Dzz
:return: (..., 3, 3) array with the diffusion tensors
"""
components = np.asanyarray(components)
if components.shape[-1] != 6:
raise ValueError("Expected 6 unique components of diffusion tensor")
first = np.stack([components[..., index] for index in (0, 1, 2)], -1)
second = np.stack([components[..., index] for index in (1, 3, 4)], -1)
third = np.stack([components[..., index] for index in (2, 4, 5)], -1)
return np.stack([first, second, third], -1)
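A minimal sketch of the component ordering, using an invented symmetric tensor - packing into the six-element form and back is lossless::

    import numpy as np

    D = np.array([[1.0, 0.1, 0.2],
                  [0.1, 2.0, 0.3],
                  [0.2, 0.3, 3.0]])

    comps = tensorToComponents(D)   # [Dxx, Dxy, Dxz, Dyy, Dyz, Dzz]
    assert np.allclose(comps, [1.0, 0.1, 0.2, 2.0, 0.3, 3.0])
    assert np.allclose(componentsToTensor(comps), D)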
def eigendecompositionToComponents(V1, V2, V3, L1, L2, L3):
"""
Converts the eigenvalues/eigenvectors into the 6 unique components of the diffusion tensor
:param V1: (..., 3) shaped array with the first eigenvector
:param V2: (..., 3) shaped array with the second eigenvector
:param V3: (..., 3) shaped array with the third eigenvector
:param L1: (..., ) shaped array with the first eigenvalue
:param L2: (..., ) shaped array with the second eigenvalue
:param L3: (..., ) shaped array with the third eigenvalue
:return: (..., 6) array with the unique components sorted like Dxx, Dxy, Dxz, Dyy, Dyz, Dzz
"""
return tensorToComponents(eigendecompositionToTensor(V1, V2, V3, L1, L2, L3))
def componentsToEigendecomposition(components):
"""
Decomposes diffusion tensor defined by its 6 unique components
:param components: (..., 6) array-like with Dxx, Dxy, Dxz, Dyy, Dyz, Dzz
:return: Tuple containing the eigenvectors and eigenvalues (V1, V2, V3, L1, L2, L3)
"""
return tensorToEigendecomposition(componentsToTensor(components))
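Putting the layers together, a round trip through the eigendecomposition should recover the original components for symmetric positive-definite input - a sanity check with random data, not a library test::

    import numpy as np

    rng   = np.random.default_rng(0)
    A     = rng.normal(size=(10, 3, 3))
    D     = A @ A.transpose(0, 2, 1) + 3 * np.eye(3)  # SPD tensors
    comps = tensorToComponents(D)

    recon = eigendecompositionToComponents(
        *componentsToEigendecomposition(comps))

    assert np.allclose(comps, recon)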
def getDTIFitDataPrefix(path): def getDTIFitDataPrefix(path):
"""Returns the prefix (a.k,a, base name) used for the ``dtifit`` file """Returns the prefix (a.k,a, base name) used for the ``dtifit`` file
names in the given directory, or ``None`` if the ``dtifit`` files could names in the given directory, or ``None`` if the ``dtifit`` files could
...@@ -117,53 +242,7 @@ def decomposeTensorMatrix(data): ...@@ -117,53 +242,7 @@ def decomposeTensorMatrix(data):
:returns: A tuple containing the principal eigenvectors and :returns: A tuple containing the principal eigenvectors and
eigenvalues of the tensor matrix. eigenvalues of the tensor matrix.
""" """
return componentsToEigendecomposition(data)
# The image contains 6 volumes, corresponding
# to the Dxx, Dxy, Dxz, Dyy, Dyz, Dzz elements
# of the tensor matrix, at each voxel.
#
# We need to re-organise this into a series of
# complete 3x3 tensor matrices, one for each
# voxel.
shape = data.shape[:3]
nvoxels = np.prod(shape)
matrices = np.zeros((nvoxels, 3, 3), dtype=np.float32)
# Copy the tensor matrix elements
# into their respective locations
matrices[:, 0, 0] = data[..., 0].flat
matrices[:, 0, 1] = data[..., 1].flat
matrices[:, 1, 0] = data[..., 1].flat
matrices[:, 0, 2] = data[..., 2].flat
matrices[:, 2, 0] = data[..., 2].flat
matrices[:, 1, 1] = data[..., 3].flat
matrices[:, 1, 2] = data[..., 4].flat
matrices[:, 2, 1] = data[..., 4].flat
matrices[:, 2, 2] = data[..., 5].flat
# Calculate the eigenvectors and
# values on all of those matrices
vals, vecs = npla.eig(matrices)
vecShape = list(shape) + [3]
# Grr, np.linalg.eig does not
# sort the eigenvalues/vectors,
# so we have to do it ourselves.
order = vals.argsort(axis=1)
i = np.arange(nvoxels)[:, np.newaxis]
vecs = vecs.transpose(0, 2, 1)
vals = vals[i, order]
vecs = vecs[i, order, :]
l1 = vals[:, 2] .reshape(shape)
l2 = vals[:, 1] .reshape(shape)
l3 = vals[:, 0] .reshape(shape)
v1 = vecs[:, 2, :].reshape(vecShape)
v2 = vecs[:, 1, :].reshape(vecShape)
v3 = vecs[:, 0, :].reshape(vecShape)
return v1, v2, v3, l1, l2, l3
class DTIFitTensor(fslimage.Nifti): class DTIFitTensor(fslimage.Nifti):
......
...@@ -22,9 +22,12 @@ following functions are provided: ...@@ -22,9 +22,12 @@ following functions are provided:
isFirstLevelAnalysis isFirstLevelAnalysis
loadDesign loadDesign
loadContrasts loadContrasts
loadFTests
loadFsf
loadSettings loadSettings
getThresholds getThresholds
loadClusterResults loadClusterResults
loadFEATDesignFile
The following functions return the names of various files of interest: The following functions return the names of various files of interest:
...@@ -38,20 +41,22 @@ The following functions return the names of various files of interest: ...@@ -38,20 +41,22 @@ The following functions return the names of various files of interest:
getPEFile getPEFile
getCOPEFile getCOPEFile
getZStatFile getZStatFile
getZFStatFile
getClusterMaskFile getClusterMaskFile
getFClusterMaskFile
""" """
import collections import collections
import logging import io
import os.path as op import logging
import numpy as np import os.path as op
import numpy as np
import fsl.utils.path as fslpath import fsl.utils.path as fslpath
import fsl.utils.transform as transform import fsl.transform.affine as affine
from . import image as fslimage
from . import image as fslimage from . import featdesign
from . import featdesign
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -166,70 +171,83 @@ def loadContrasts(featdir): ...@@ -166,70 +171,83 @@ def loadContrasts(featdir):
:arg featdir: A FEAT directory. :arg featdir: A FEAT directory.
""" """
matrix = None filename = op.join(featdir, 'design.con')
numContrasts = 0
names = {}
designcon = op.join(featdir, 'design.con')
log.debug('Loading FEAT contrasts from {}'.format(designcon)) log.debug('Loading FEAT contrasts from %s', filename)
with open(designcon, 'rt') as f: try:
designcon = loadFEATDesignFile(filename)
contrasts = np.genfromtxt(io.StringIO(designcon['Matrix']), ndmin=2)
numContrasts = int(designcon['NumContrasts'])
names = []
while True: if numContrasts != contrasts.shape[0]:
line = f.readline().strip() raise RuntimeError(f'Matrix shape {contrasts.shape} does not '
f'match number of contrasts {numContrasts}')
if line.startswith('/ContrastName'): contrasts = [list(row) for row in contrasts]
tkns = line.split(None, 1)
num = [c for c in tkns[0] if c.isdigit()]
num = int(''.join(num))
# The /ContrastName field may not for i in range(numContrasts):
# actually have a name specified cname = designcon.get(f'ContrastName{i + 1}', '')
if len(tkns) > 1: if cname == '':
name = tkns[1].strip() cname = f'{i + 1}'
names[num] = name names.append(cname)
elif line.startswith('/NumContrasts'): except Exception as e:
numContrasts = int(line.split()[1]) log.debug('Error reading %s: %s', filename, e, exc_info=True)
raise RuntimeError(f'{filename} does not appear '
'to be a valid design.con file') from e
elif line == '/Matrix': return names, contrasts
break
matrix = np.loadtxt(f, ndmin=2)
if matrix is None or \ def loadFTests(featdir):
numContrasts != matrix.shape[0]: """Loads F-tests from a FEAT directory. Returns a list of f-test vectors
raise RuntimeError('{} does not appear to be a ' (each of which is a list itself), where each vector contains a 1 or a 0
'valid design.con file'.format(designcon)) denoting the contrasts included in the F-test.
# Fill in any missing contrast names :arg featdir: A FEAT directory.
if len(names) != numContrasts: """
for i in range(numContrasts):
if i + 1 not in names:
names[i + 1] = str(i + 1)
names = [names[c + 1] for c in range(numContrasts)] filename = op.join(featdir, 'design.fts')
contrasts = []
for row in matrix: if not op.exists(filename):
contrasts.append(list(row)) return []
return names, contrasts log.debug('Loading FEAT F-tests from %s', filename)
try:
desfts = loadFEATDesignFile(filename)
ftests = np.genfromtxt(io.StringIO(desfts['Matrix']), ndmin=2)
ncols = int(desfts['NumWaves'])
nrows = int(desfts['NumContrasts'])
def loadSettings(featdir): if ftests.shape != (nrows, ncols):
"""Loads the analysis settings from a FEAT directory. raise RuntimeError(f'Matrix shape {ftests.shape} does not match '
f'number of EVs/FTests ({ncols}, {nrows})')
Returns a dict containing the settings specified in the ``design.fsf`` ftests = [list(row) for row in ftests]
file within the directory
:arg featdir: A FEAT directory. except Exception as e:
log.debug('Error reading %s: %s', filename, e, exc_info=True)
raise RuntimeError(f'{filename} does not appear '
'to be a valid design.fts file') from e
return ftests
def loadFsf(designfsf):
"""Loads the analysis settings from a text file (.fsf) used to configure
FEAT.
Returns a dict containing the settings specified in the file
:arg designfsf: A .fsf file.
""" """
settings = collections.OrderedDict() settings = collections.OrderedDict()
designfsf = op.join(featdir, 'design.fsf')
log.debug('Loading FEAT settings from {}'.format(designfsf)) log.debug('Loading FEAT settings from %s', designfsf)
with open(designfsf, 'rt') as f: with open(designfsf, 'rt') as f:
...@@ -252,6 +270,20 @@ def loadSettings(featdir): ...@@ -252,6 +270,20 @@ def loadSettings(featdir):
return settings return settings
def loadSettings(featdir):
"""Loads the analysis settings from a FEAT directory.
Returns a dict containing the settings specified in the ``design.fsf``
file within the directory
:arg featdir: A FEAT directory.
"""
designfsf = op.join(featdir, 'design.fsf')
return loadFsf(designfsf)
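For example (directory name invented), the two entry points now share one parser::

    settings = loadSettings('analysis.feat')
    # ... is equivalent to:
    settings = loadFsf(op.join('analysis.feat', 'design.fsf'))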
def loadDesign(featdir, settings): def loadDesign(featdir, settings):
"""Loads the design matrix from a FEAT directory. """Loads the design matrix from a FEAT directory.
...@@ -297,19 +329,22 @@ def isFirstLevelAnalysis(settings): ...@@ -297,19 +329,22 @@ def isFirstLevelAnalysis(settings):
return settings['level'] == '1' return settings['level'] == '1'
def loadClusterResults(featdir, settings, contrast): def loadClusterResults(featdir, settings, contrast, ftest=False):
"""If cluster thresholding was used in the FEAT analysis, this function """If cluster thresholding was used in the FEAT analysis, this function
will load and return the cluster results for the specified (0-indexed) will load and return the cluster results for the specified (0-indexed)
contrast number. contrast or f-test.
If there are no cluster results for the given contrast, ``None`` is If there are no cluster results for the given contrast/f-test, ``None``
returned. is returned.
An error will be raised if the cluster file cannot be parsed. An error will be raised if the cluster file cannot be parsed.
:arg featdir: A FEAT directory. :arg featdir: A FEAT directory.
:arg settings: A FEAT settings dictionary. :arg settings: A FEAT settings dictionary.
:arg contrast: 0-indexed contrast number. :arg contrast: 0-indexed contrast or f-test number.
:arg ftest: If ``False`` (default), return cluster results for
the contrast numbered ``contrast``. Otherwise, return
cluster results for the f-test numbered ``contrast``.
:returns: A list of ``Cluster`` instances, each of which contains :returns: A list of ``Cluster`` instances, each of which contains
information about one cluster. A ``Cluster`` object has the information about one cluster. A ``Cluster`` object has the
...@@ -330,11 +365,16 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -330,11 +365,16 @@ def loadClusterResults(featdir, settings, contrast):
gravity. gravity.
``zcogz`` Z voxel coordinate of cluster centre of ``zcogz`` Z voxel coordinate of cluster centre of
gravity. gravity.
``copemax`` Maximum COPE value in cluster. ``copemax`` Maximum COPE value in cluster (not
``copemaxx`` X voxel coordinate of maximum COPE value. present for f-tests).
``copemaxx`` X voxel coordinate of maximum COPE value
(not present for f-tests).
``copemaxy`` Y voxel coordinate of maximum COPE value. ``copemaxy`` Y voxel coordinate of maximum COPE value.
(not present for f-tests).
``copemaxz`` Z voxel coordinate of maximum COPE value. ``copemaxz`` Z voxel coordinate of maximum COPE value.
(not present for f-tests).
``copemean`` Mean COPE of all voxels in the cluster. ``copemean`` Mean COPE of all voxels in the cluster.
(not present for f-tests).
============ ========================================= ============ =========================================
""" """
...@@ -344,8 +384,11 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -344,8 +384,11 @@ def loadClusterResults(featdir, settings, contrast):
# the ZMax/COG etc coordinates # the ZMax/COG etc coordinates
# are usually in voxel coordinates # are usually in voxel coordinates
coordXform = np.eye(4) coordXform = np.eye(4)
clusterFile = op.join(
featdir, 'cluster_zstat{}.txt'.format(contrast + 1)) if ftest: prefix = 'cluster_zfstat'
else: prefix = 'cluster_zstat'
clusterFile = op.join(featdir, f'{prefix}{contrast + 1}.txt')
if not op.exists(clusterFile): if not op.exists(clusterFile):
...@@ -354,22 +397,16 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -354,22 +397,16 @@ def loadClusterResults(featdir, settings, contrast):
# the cluster file will instead be called # the cluster file will instead be called
# 'cluster_zstatX_std.txt', so we'd better # 'cluster_zstatX_std.txt', so we'd better
# check for that too. # check for that too.
clusterFile = op.join( clusterFile = op.join(featdir, f'{prefix}{contrast + 1}_std.txt')
featdir, 'cluster_zstat{}_std.txt'.format(contrast + 1))
if not op.exists(clusterFile): if not op.exists(clusterFile):
return None return None
# In higher levle analysis run in some standard # In higher level analysis run in some standard
# space, the cluster coordinates are in standard # space, the cluster coordinates are in standard
# space. We transform them to voxel coordinates. # space. We transform them to voxel coordinates.
# later on. # later on.
coordXform = fslimage.Image( coordXform = fslimage.Image(getDataFile(featdir)).worldToVoxMat
getDataFile(featdir),
loadData=False).worldToVoxMat
log.debug('Loading cluster results for contrast {} from {}'.format(
contrast, clusterFile))
# The cluster.txt file is converted # The cluster.txt file is converted
# into a list of Cluster objects, # into a list of Cluster objects,
...@@ -385,6 +422,20 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -385,6 +422,20 @@ def loadClusterResults(featdir, settings, contrast):
setattr(self, attrName, val) setattr(self, attrName, val)
# if cluster thresholding was not used,
# the cluster table will not contain
# P values.
if not hasattr(self, 'p'): self.p = 1.0
if not hasattr(self, 'logp'): self.logp = 0.0
# F-test cluster results will not have
# COPE-* results
if not hasattr(self, 'copemax'): self.copemax = np.nan
if not hasattr(self, 'copemaxx'): self.copemaxx = np.nan
if not hasattr(self, 'copemaxy'): self.copemaxy = np.nan
if not hasattr(self, 'copemaxz'): self.copemaxz = np.nan
if not hasattr(self, 'copemean'): self.copemean = np.nan
# This dict provides a mapping between # This dict provides a mapping between
# Cluster object attribute names, and # Cluster object attribute names, and
# the corresponding column name in the # the corresponding column name in the
...@@ -416,10 +467,9 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -416,10 +467,9 @@ def loadClusterResults(featdir, settings, contrast):
'COPE-MAX Z (mm)' : 'copemaxz', 'COPE-MAX Z (mm)' : 'copemaxz',
'COPE-MEAN' : 'copemean'} 'COPE-MEAN' : 'copemean'}
# An error will be raised if the log.debug('Loading cluster results for contrast %s from %s',
# cluster file does not exist (e.g. contrast, clusterFile)
# if the specified contrast index
# is invalid)
with open(clusterFile, 'rt') as f: with open(clusterFile, 'rt') as f:
# Get every line in the file, # Get every line in the file,
...@@ -441,12 +491,11 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -441,12 +491,11 @@ def loadClusterResults(featdir, settings, contrast):
colNames = colNames.split('\t') colNames = colNames.split('\t')
clusterLines = [cl .split('\t') for cl in clusterLines] clusterLines = [cl .split('\t') for cl in clusterLines]
# Turn each cluster line into a # Turn each cluster line into a Cluster
# Cluster instance. An error will # instance. An error will be raised if the
# be raised if the column names # column names are unrecognised (i.e. not
# are unrecognised (i.e. not in # in the colmap above), or if the file is
# the colmap above), or if the # poorly formed.
# file is poorly formed.
clusters = [Cluster(**dict(zip(colNames, cl))) for cl in clusterLines] clusters = [Cluster(**dict(zip(colNames, cl))) for cl in clusterLines]
# Make sure all coordinates are in voxels - # Make sure all coordinates are in voxels -
...@@ -461,17 +510,51 @@ def loadClusterResults(featdir, settings, contrast): ...@@ -461,17 +510,51 @@ def loadClusterResults(featdir, settings, contrast):
zcog = [c.zcogx, c.zcogy, c.zcogz] zcog = [c.zcogx, c.zcogy, c.zcogz]
copemax = [c.copemaxx, c.copemaxy, c.copemaxz] copemax = [c.copemaxx, c.copemaxy, c.copemaxz]
zmax = transform.transform([zmax], coordXform)[0].round() zmax = affine.transform([zmax], coordXform)[0]
zcog = transform.transform([zcog], coordXform)[0].round() zcog = affine.transform([zcog], coordXform)[0]
copemax = transform.transform([copemax], coordXform)[0].round() copemax = affine.transform([copemax], coordXform)[0]
c.zmaxx, c.zmaxy, c.zmaxz = zmax c.zmaxx, c.zmaxy, c.zmaxz = zmax
c.zcogx, c.zcogy, c.zcogz = zcog c.zcogx, c.zcogy, c.zcogz = zcog
c.copemax, c.copemaxy, c.copemaxz = copemax c.copemaxx, c.copemaxy, c.copemaxz = copemax
return clusters return clusters
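A usage sketch for the new ``ftest`` flag, against a hypothetical FEAT directory (cluster attributes as per the table above)::

    settings = loadSettings('analysis.feat')

    # Clusters for the first contrast, and for the first f-test
    conclusts = loadClusterResults('analysis.feat', settings, 0)
    fclusts   = loadClusterResults('analysis.feat', settings, 0, ftest=True)

    if fclusts is not None:
        for c in fclusts:
            print(c.index, c.nvoxels, c.zmax)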
def loadFEATDesignFile(filename):
"""Load a FEAT design file, e.g. ``design.mat``, ``design.con``, ``design.fts``.
These files contain key-value pairs, and are formatted according to an
undocumented structure where each key is of the form "/KeyName", and is
followed immediately by a whitespace character, and then the value.
:arg filename: File to load
:returns: A dictionary of key-value pairs. The values are all left
as strings.
"""
fields = {}
with open(filename, 'rt') as f:
content = f.read()
content = content.split('/')
for line in content:
line = line.strip()
if line == '':
continue
tokens = line.split(maxsplit=1)
if len(tokens) == 1:
name, value = tokens[0], ''
else:
name, value = tokens
fields[name] = value
return fields
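To illustrate the parsing rules with invented file contents - a ``design.con`` containing::

    /ContrastName1 motor
    /NumWaves 2
    /NumContrasts 1
    /Matrix
    1 0

would be loaded as::

    fields = loadFEATDesignFile('design.con')
    # fields == {'ContrastName1' : 'motor',
    #            'NumWaves'      : '2',
    #            'NumContrasts'  : '1',
    #            'Matrix'        : '1 0'}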
def getDataFile(featdir): def getDataFile(featdir):
"""Returns the name of the file in the FEAT directory which contains """Returns the name of the file in the FEAT directory which contains
the model input data (typically called ``filtered_func_data.nii.gz``). the model input data (typically called ``filtered_func_data.nii.gz``).
...@@ -515,7 +598,7 @@ def getPEFile(featdir, ev): ...@@ -515,7 +598,7 @@ def getPEFile(featdir, ev):
:arg featdir: A FEAT directory. :arg featdir: A FEAT directory.
:arg ev: The EV number (0-indexed). :arg ev: The EV number (0-indexed).
""" """
pefile = op.join(featdir, 'stats', 'pe{}'.format(ev + 1)) pefile = op.join(featdir, 'stats', f'pe{ev + 1}')
return fslimage.addExt(pefile, mustExist=True) return fslimage.addExt(pefile, mustExist=True)
...@@ -527,7 +610,7 @@ def getCOPEFile(featdir, contrast): ...@@ -527,7 +610,7 @@ def getCOPEFile(featdir, contrast):
:arg featdir: A FEAT directory. :arg featdir: A FEAT directory.
:arg contrast: The contrast number (0-indexed). :arg contrast: The contrast number (0-indexed).
""" """
copefile = op.join(featdir, 'stats', 'cope{}'.format(contrast + 1)) copefile = op.join(featdir, 'stats', f'cope{contrast + 1}')
return fslimage.addExt(copefile, mustExist=True) return fslimage.addExt(copefile, mustExist=True)
...@@ -539,10 +622,22 @@ def getZStatFile(featdir, contrast): ...@@ -539,10 +622,22 @@ def getZStatFile(featdir, contrast):
:arg featdir: A FEAT directory. :arg featdir: A FEAT directory.
:arg contrast: The contrast number (0-indexed). :arg contrast: The contrast number (0-indexed).
""" """
zfile = op.join(featdir, 'stats', 'zstat{}'.format(contrast + 1)) zfile = op.join(featdir, 'stats', f'zstat{contrast + 1}')
return fslimage.addExt(zfile, mustExist=True) return fslimage.addExt(zfile, mustExist=True)
def getZFStatFile(featdir, ftest):
"""Returns the path of the Z-statistic file for the specified F-test.
Raises a :exc:`~fsl.utils.path.PathError` if the file does not exist.
:arg featdir: A FEAT directory.
:arg ftest: The F-test number (0-indexed).
"""
zffile = op.join(featdir, 'stats', f'zfstat{ftest + 1}')
return fslimage.addExt(zffile, mustExist=True)
def getClusterMaskFile(featdir, contrast): def getClusterMaskFile(featdir, contrast):
"""Returns the path of the cluster mask file for the specified contrast. """Returns the path of the cluster mask file for the specified contrast.
...@@ -551,5 +646,17 @@ def getClusterMaskFile(featdir, contrast): ...@@ -551,5 +646,17 @@ def getClusterMaskFile(featdir, contrast):
:arg featdir: A FEAT directory. :arg featdir: A FEAT directory.
:arg contrast: The contrast number (0-indexed). :arg contrast: The contrast number (0-indexed).
""" """
mfile = op.join(featdir, 'cluster_mask_zstat{}'.format(contrast + 1)) mfile = op.join(featdir, f'cluster_mask_zstat{contrast + 1}')
return fslimage.addExt(mfile, mustExist=True)
def getFClusterMaskFile(featdir, ftest):
"""Returns the path of the cluster mask file for the specified f-test.
Raises a :exc:`~fsl.utils.path.PathError` if the file does not exist.
:arg featdir: A FEAT directory.
:arg ftest: The f-test number (0-indexed).
"""
mfile = op.join(featdir, f'cluster_mask_zfstat{ftest + 1}')
return fslimage.addExt(mfile, mustExist=True) return fslimage.addExt(mfile, mustExist=True)
...@@ -160,7 +160,7 @@ class FEATFSFDesign(object): ...@@ -160,7 +160,7 @@ class FEATFSFDesign(object):
# Print a warning if we're # Print a warning if we're
# using an old version of FEAT # using an old version of FEAT
if version < 6: if version < 6:
log.warning('Unsupported FEAT version: {}'.format(version)) log.warning('Unsupported FEAT version: %s', version)
# We need to parse the EVS a bit # We need to parse the EVS a bit
# differently depending on whether # differently depending on whether
...@@ -210,15 +210,100 @@ class FEATFSFDesign(object): ...@@ -210,15 +210,100 @@ class FEATFSFDesign(object):
continue continue
if (not self.__loadVoxEVs) or (ev.filename is None): if (not self.__loadVoxEVs) or (ev.filename is None):
log.warning('Voxel EV image missing ' log.warning('Voxel EV image missing for ev %s', ev.index)
'for ev {}'.format(ev.index))
continue continue
design[:, ev.index] = ev.image[x, y, z, :] design[:, ev.index] = ev.getData(x, y, z)
return design return design
class VoxelwiseEVMixin(object):
"""Mixin class for voxelwise EVs.
``VoxelwiseEVMixin`` instances contain the following attributes:
============ ======================================================
``filename`` Path to the image file containing the data for this EV
``image`` Reference to the :class:`.Image` object
============ ======================================================
Some FSL tools (e.g. `PNM
<https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/PNM>`_) generate *compressed*
voxelwise EVs, where there is only one voxel per slice. For example,
if the input data has shape ``(x, y, z, t)``, and slices are acquired
along the Z plane, voxelwise EV files generated by PNM will have shape
``(1, 1, z, t)``.
Therefore, using the :meth:`getData` method is preferable to accessing
the :meth:`image` directly, as ``getData`` will check for compressed
images, and adjust the voxel coordinates accordingly.
"""
def __init__(self, filename):
"""Create a ``VoxelwiseEVMixin``.
:arg filename: Path to the file containing the data for this
``VoxelwiseEV``.
"""
if op.exists(filename):
self.__filename = filename
else:
log.warning('Voxelwise EV file does not exist: %s', filename)
self.__filename = None
self.__image = None
def __del__(self):
"""Clears any reference to the voxelwise EV image. """
self.__image = None
@property
def filename(self):
"""Returns the path to the image file containing the data for this EV.
"""
return self.__filename
@property
def image(self):
"""Returns the :class:`.Image` containing the voxelwise EV data. """
if self.__filename is None:
return None
if self.__image is not None:
return self.__image
self.__image = fslimage.Image(self.__filename, mmap=False)
return self.__image
def getData(self, x, y, z):
"""Returns the data at the specified voxel, taking into account
compressed voxelwise EVs.
"""
image = self.image
if image is None:
return None
dx, dy, dz = image.shape[:3]
# "Compressed" images have one voxel
# per slice, i.e. they have shape
# [1, 1, nslices, ntimepoints] (if
# Z is the slice plane).
if sum((dx == 1, dy == 1, dz == 1)) == 2:
if dx == 1: x = 0
if dy == 1: y = 0
if dz == 1: z = 0
return image[x, y, z, :]
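A standalone re-statement of the compressed-image rule (shapes invented), showing why in-plane coordinates are clamped rather than indexed out of bounds::

    import numpy as np

    def compressedLookup(data, x, y, z):
        # Same logic as getData: if exactly two of the three
        # spatial dimensions are singleton, the image is
        # 'compressed', and those coordinates collapse to zero.
        dx, dy, dz = data.shape[:3]
        if sum((dx == 1, dy == 1, dz == 1)) == 2:
            if dx == 1: x = 0
            if dy == 1: y = 0
            if dz == 1: z = 0
        return data[x, y, z, :]

    data = np.zeros((1, 1, 40, 200))          # one voxel per Z slice
    ts   = compressedLookup(data, 27, 31, 9)  # -> data[0, 0, 9, :]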
class EV(object): class EV(object):
"""Class representing an explanatory variable in a FEAT design matrix. """Class representing an explanatory variable in a FEAT design matrix.
...@@ -276,7 +361,7 @@ class BasisFunctionEV(NormalEV): ...@@ -276,7 +361,7 @@ class BasisFunctionEV(NormalEV):
pass pass
class VoxelwiseEV(NormalEV): class VoxelwiseEV(NormalEV, VoxelwiseEVMixin):
"""Class representing an EV with different values for each voxel in the """Class representing an EV with different values for each voxel in the
analysis. analysis.
...@@ -308,34 +393,7 @@ class VoxelwiseEV(NormalEV): ...@@ -308,34 +393,7 @@ class VoxelwiseEV(NormalEV):
``VoxelwiseEV``. ``VoxelwiseEV``.
""" """
NormalEV.__init__(self, realIdx, origIdx, title) NormalEV.__init__(self, realIdx, origIdx, title)
VoxelwiseEVMixin.__init__(self, filename)
if op.exists(filename):
self.filename = filename
else:
log.warning('Voxelwise EV file does not '
'exist: {}'.format(filename))
self.filename = None
self.__image = None
def __del__(self):
"""Clears any reference to the voxelwise EV image. """
self.__image = None
@property
def image(self):
"""Returns the :class:`.Image` containing the voxelwise EV data. """
if self.filename is None:
return None
if self.__image is not None:
return self.__image
self.__image = fslimage.Image(self.filename, mmap=False)
return self.__image
class ConfoundEV(EV): class ConfoundEV(EV):
...@@ -386,7 +444,7 @@ class MotionParameterEV(EV): ...@@ -386,7 +444,7 @@ class MotionParameterEV(EV):
self.motionIndex = motionIndex self.motionIndex = motionIndex
class VoxelwiseConfoundEV(EV): class VoxelwiseConfoundEV(EV, VoxelwiseEVMixin):
"""Class representing a voxelwise confound EV. """Class representing a voxelwise confound EV.
``VoxelwiseConfoundEV`` instances contain the following attributes (in ``VoxelwiseConfoundEV`` instances contain the following attributes (in
...@@ -396,6 +454,7 @@ class VoxelwiseConfoundEV(EV): ...@@ -396,6 +454,7 @@ class VoxelwiseConfoundEV(EV):
``voxIndex`` Index of this ``VoxelwiseConfoundEV`` (starting from 0) in ``voxIndex`` Index of this ``VoxelwiseConfoundEV`` (starting from 0) in
relation to all other voxelwise confound EVs. relation to all other voxelwise confound EVs.
``filename`` Path to the image file containing the data for this EV ``filename`` Path to the image file containing the data for this EV
``image`` Reference to the :class:`.Image` object
============ ========================================================== ============ ==========================================================
""" """
def __init__(self, index, voxIndex, title, filename): def __init__(self, index, voxIndex, title, filename):
...@@ -407,17 +466,13 @@ class VoxelwiseConfoundEV(EV): ...@@ -407,17 +466,13 @@ class VoxelwiseConfoundEV(EV):
``VoxelwiseConfoundEV`` in relation to all other ``VoxelwiseConfoundEV`` in relation to all other
voxelwise confound EVs. voxelwise confound EVs.
:arg title: Name of this ``VoxelwiseConfoundEV``. :arg title: Name of this ``VoxelwiseConfoundEV``.
:arg filename: Path to the file containing the data for this
``VoxelwiseConfoundEV``.
""" """
EV.__init__(self, index, title) EV.__init__(self, index, title)
VoxelwiseEVMixin.__init__(self, filename)
self.voxIndex = voxIndex self.voxIndex = voxIndex
if op.exists(filename):
self.filename = filename
else:
log.warning('Voxelwise confound EV file does '
'not exist: {}'.format(filename))
self.filename = None
def getFirstLevelEVs(featDir, settings, designMat): def getFirstLevelEVs(featDir, settings, designMat):
"""Derives the EVs for the given first level FEAT analysis. """Derives the EVs for the given first level FEAT analysis.
...@@ -445,11 +500,11 @@ def getFirstLevelEVs(featDir, settings, designMat): ...@@ -445,11 +500,11 @@ def getFirstLevelEVs(featDir, settings, designMat):
# - voxelwise EVs # - voxelwise EVs
for origIdx in range(origEVs): for origIdx in range(origEVs):
title = settings[ 'evtitle{}' .format(origIdx + 1)] title = settings[ f'evtitle{origIdx + 1}']
shape = int(settings[ 'shape{}' .format(origIdx + 1)]) shape = int(settings[ f'shape{origIdx + 1}'])
convolve = int(settings[ 'convolve{}' .format(origIdx + 1)]) convolve = int(settings[ f'convolve{origIdx + 1}'])
deriv = int(settings[ 'deriv_yn{}' .format(origIdx + 1)]) deriv = int(settings[ f'deriv_yn{origIdx + 1}'])
basis = int(settings.get('basisfnum{}'.format(origIdx + 1), -1)) basis = int(settings.get(f'basisfnum{origIdx + 1}', -1))
# Normal EV. This is just a column # Normal EV. This is just a column
# in the design matrix, defined by # in the design matrix, defined by
...@@ -468,8 +523,7 @@ def getFirstLevelEVs(featDir, settings, designMat): ...@@ -468,8 +523,7 @@ def getFirstLevelEVs(featDir, settings, designMat):
# The addExt function will raise an # The addExt function will raise an
# error if the file does not exist. # error if the file does not exist.
filename = op.join( filename = op.join(featDir, f'designVoxelwiseEV{origIdx + 1}')
featDir, 'designVoxelwiseEV{}'.format(origIdx + 1))
filename = fslimage.addExt(filename, True) filename = fslimage.addExt(filename, True)
evs.append(VoxelwiseEV(len(evs), origIdx, title, filename)) evs.append(VoxelwiseEV(len(evs), origIdx, title, filename))
...@@ -550,7 +604,7 @@ def getFirstLevelEVs(featDir, settings, designMat): ...@@ -550,7 +604,7 @@ def getFirstLevelEVs(featDir, settings, designMat):
startIdx = len(evs) + 1 startIdx = len(evs) + 1
if voxConfLocs != list(range(startIdx, startIdx + len(voxConfFiles))): if voxConfLocs != list(range(startIdx, startIdx + len(voxConfFiles))):
raise FSFError('Unsupported voxelwise confound ordering ' raise FSFError('Unsupported voxelwise confound ordering '
'({} -> {})'.format(startIdx, voxConfLocs)) f'({startIdx} -> {voxConfLocs})')
# Create the voxelwise confound EVs. # Create the voxelwise confound EVs.
# We make a name for the EV from the # We make a name for the EV from the
...@@ -623,7 +677,7 @@ def getHigherLevelEVs(featDir, settings, designMat): ...@@ -623,7 +677,7 @@ def getHigherLevelEVs(featDir, settings, designMat):
for origIdx in range(origEVs): for origIdx in range(origEVs):
# All we need is the title # All we need is the title
title = settings['evtitle{}'.format(origIdx + 1)] title = settings[f'evtitle{origIdx + 1}']
evs.append(NormalEV(len(evs), origIdx, title)) evs.append(NormalEV(len(evs), origIdx, title))
# Only the input file is specified for # Only the input file is specified for
...@@ -632,7 +686,7 @@ def getHigherLevelEVs(featDir, settings, designMat): ...@@ -632,7 +686,7 @@ def getHigherLevelEVs(featDir, settings, designMat):
# name. # name.
for origIdx in range(voxEVs): for origIdx in range(voxEVs):
filename = settings['evs_vox_{}'.format(origIdx + 1)] filename = settings[f'evs_vox_{origIdx + 1}']
title = op.basename(fslimage.removeExt(filename)) title = op.basename(fslimage.removeExt(filename))
evs.append(VoxelwiseEV(len(evs), origIdx, title, filename)) evs.append(VoxelwiseEV(len(evs), origIdx, title, filename))
...@@ -648,12 +702,12 @@ def loadDesignMat(designmat): ...@@ -648,12 +702,12 @@ def loadDesignMat(designmat):
:arg designmat: Path to the ``design.mat`` file. :arg designmat: Path to the ``design.mat`` file.
""" """
log.debug('Loading FEAT design matrix from {}'.format(designmat)) log.debug('Loading FEAT design matrix from %s', designmat)
matrix = np.loadtxt(designmat, comments='/', ndmin=2) matrix = np.loadtxt(designmat, comments='/', ndmin=2)
if matrix is None or matrix.size == 0 or len(matrix.shape) != 2: if matrix is None or matrix.size == 0 or len(matrix.shape) != 2:
raise FSFError('{} does not appear to be a ' raise FSFError(f'{designmat} does not appear '
'valid design.mat file'.format(designmat)) 'to be a valid design.mat file')
return matrix return matrix
...@@ -63,8 +63,8 @@ class FEATImage(fslimage.Image): ...@@ -63,8 +63,8 @@ class FEATImage(fslimage.Image):
path = op.join(path, 'filtered_func_data') path = op.join(path, 'filtered_func_data')
if not featanalysis.isFEATImage(path): if not featanalysis.isFEATImage(path):
raise ValueError('{} does not appear to be data ' raise ValueError(f'{path} does not appear to be '
'from a FEAT analysis'.format(path)) 'data from a FEAT analysis')
featDir = op.dirname(path) featDir = op.dirname(path)
settings = featanalysis.loadSettings( featDir) settings = featanalysis.loadSettings( featDir)
...@@ -72,9 +72,11 @@ class FEATImage(fslimage.Image): ...@@ -72,9 +72,11 @@ class FEATImage(fslimage.Image):
if featanalysis.hasStats(featDir): if featanalysis.hasStats(featDir):
design = featanalysis.loadDesign( featDir, settings) design = featanalysis.loadDesign( featDir, settings)
names, cons = featanalysis.loadContrasts(featDir) names, cons = featanalysis.loadContrasts(featDir)
ftests = featanalysis.loadFTests( featDir)
else: else:
design = None design = None
names, cons = [], [] names, cons = [], []
ftests = []
fslimage.Image.__init__(self, path, **kwargs) fslimage.Image.__init__(self, path, **kwargs)
...@@ -83,26 +85,31 @@ class FEATImage(fslimage.Image): ...@@ -83,26 +85,31 @@ class FEATImage(fslimage.Image):
self.__design = design self.__design = design
self.__contrastNames = names self.__contrastNames = names
self.__contrasts = cons self.__contrasts = cons
self.__ftests = ftests
self.__settings = settings self.__settings = settings
self.__residuals = None self.__residuals = None
self.__pes = [None] * self.numEVs() self.__pes = [None] * self.numEVs()
self.__copes = [None] * self.numContrasts() self.__copes = [None] * self.numContrasts()
self.__zstats = [None] * self.numContrasts() self.__zstats = [None] * self.numContrasts()
self.__zfstats = [None] * self.numFTests()
self.__clustMasks = [None] * self.numContrasts() self.__clustMasks = [None] * self.numContrasts()
self.__fclustMasks = [None] * self.numFTests()
if 'name' not in kwargs: if 'name' not in kwargs:
self.name = '{}: {}'.format(self.__analysisName, self.name) self.name = f'{self.__analysisName}: {self.name}'
def __del__(self): def __del__(self):
"""Clears references to any loaded images.""" """Clears references to any loaded images."""
self.__design = None self.__design = None
self.__residuals = None self.__residuals = None
self.__pes = None self.__pes = None
self.__copes = None self.__copes = None
self.__zstats = None self.__zstats = None
self.__clustMasks = None self.__zfstats = None
self.__clustMasks = None
self.__fclustMasks = None
def getFEATDir(self): def getFEATDir(self):
...@@ -191,6 +198,11 @@ class FEATImage(fslimage.Image): ...@@ -191,6 +198,11 @@ class FEATImage(fslimage.Image):
return len(self.__contrasts) return len(self.__contrasts)
def numFTests(self):
"""Returns the number of f-tests in the analysis."""
return len(self.__ftests)
def contrastNames(self): def contrastNames(self):
"""Returns a list containing the name of each contrast in the analysis. """Returns a list containing the name of each contrast in the analysis.
""" """
...@@ -206,6 +218,15 @@ class FEATImage(fslimage.Image): ...@@ -206,6 +218,15 @@ class FEATImage(fslimage.Image):
return [list(c) for c in self.__contrasts] return [list(c) for c in self.__contrasts]
def ftests(self):
"""Returns a list containing the analysis f-test vectors.
See :func:`.featanalysis.loadFTests`
"""
return [list(f) for f in self.__ftests]
def thresholds(self): def thresholds(self):
"""Returns the statistical thresholds used in the analysis. """Returns the statistical thresholds used in the analysis.
...@@ -214,14 +235,16 @@ class FEATImage(fslimage.Image): ...@@ -214,14 +235,16 @@ class FEATImage(fslimage.Image):
return featanalysis.getThresholds(self.__settings) return featanalysis.getThresholds(self.__settings)
def clusterResults(self, contrast): def clusterResults(self, contrast, ftest=False):
"""Returns the clusters found in the analysis. """Returns the clusters found in the analysis for the specified
contrast or f-test.
See :func:`.featanalysis.loadClusterResults` See :func:`.featanalysis.loadClusterResults`
""" """
return featanalysis.loadClusterResults(self.__featDir, return featanalysis.loadClusterResults(self.__featDir,
self.__settings, self.__settings,
contrast) contrast,
ftest)
def getPE(self, ev): def getPE(self, ev):
...@@ -229,12 +252,10 @@ class FEATImage(fslimage.Image): ...@@ -229,12 +252,10 @@ class FEATImage(fslimage.Image):
if self.__pes[ev] is None: if self.__pes[ev] is None:
pefile = featanalysis.getPEFile(self.__featDir, ev) pefile = featanalysis.getPEFile(self.__featDir, ev)
evname = self.evNames()[ev]
self.__pes[ev] = fslimage.Image( self.__pes[ev] = fslimage.Image(
pefile, pefile,
name='{}: PE{} ({})'.format( name=f'{self.__analysisName}: PE{ev + 1} ({evname})')
self.__analysisName,
ev + 1,
self.evNames()[ev]))
return self.__pes[ev] return self.__pes[ev]
...@@ -246,7 +267,7 @@ class FEATImage(fslimage.Image): ...@@ -246,7 +267,7 @@ class FEATImage(fslimage.Image):
resfile = featanalysis.getResidualFile(self.__featDir) resfile = featanalysis.getResidualFile(self.__featDir)
self.__residuals = fslimage.Image( self.__residuals = fslimage.Image(
resfile, resfile,
name='{}: residuals'.format(self.__analysisName)) name=f'{self.__analysisName}: residuals')
return self.__residuals return self.__residuals
...@@ -256,12 +277,10 @@ class FEATImage(fslimage.Image): ...@@ -256,12 +277,10 @@ class FEATImage(fslimage.Image):
if self.__copes[con] is None: if self.__copes[con] is None:
copefile = featanalysis.getCOPEFile(self.__featDir, con) copefile = featanalysis.getCOPEFile(self.__featDir, con)
conname = self.contrastNames()[con]
self.__copes[con] = fslimage.Image( self.__copes[con] = fslimage.Image(
copefile, copefile,
name='{}: COPE{} ({})'.format( name=f'{self.__analysisName}: COPE{con + 1} ({conname})')
self.__analysisName,
con + 1,
self.contrastNames()[con]))
return self.__copes[con] return self.__copes[con]
...@@ -270,35 +289,54 @@ class FEATImage(fslimage.Image): ...@@ -270,35 +289,54 @@ class FEATImage(fslimage.Image):
""" """
if self.__zstats[con] is None: if self.__zstats[con] is None:
zfile = featanalysis.getZStatFile(self.__featDir, con) zfile = featanalysis.getZStatFile(self.__featDir, con)
conname = self.contrastNames()[con]
self.__zstats[con] = fslimage.Image( self.__zstats[con] = fslimage.Image(
zfile, zfile,
name='{}: zstat{} ({})'.format( name=f'{self.__analysisName}: zstat{con + 1} ({conname})')
self.__analysisName,
con + 1,
self.contrastNames()[con]))
return self.__zstats[con] return self.__zstats[con]
def getZFStats(self, ftest):
"""Returns the Z statistic image for the given f-test (0-indexed). """
if self.__zfstats[ftest] is None:
zfile = featanalysis.getZFStatFile(self.__featDir, ftest)
self.__zfstats[ftest] = fslimage.Image(
zfile,
name=f'{self.__analysisName}: zfstat{ftest + 1}')
return self.__zfstats[ftest]
def getClusterMask(self, con): def getClusterMask(self, con):
"""Returns the cluster mask image for the given contrast (0-indexed). """Returns the cluster mask image for the given contrast (0-indexed).
""" """
if self.__clustMasks[con] is None: if self.__clustMasks[con] is None:
mfile = featanalysis.getClusterMaskFile(self.__featDir, con) mfile = featanalysis.getClusterMaskFile(self.__featDir, con)
conname = self.contrastNames()[con]
self.__clustMasks[con] = fslimage.Image( self.__clustMasks[con] = fslimage.Image(
mfile, mfile,
name='{}: cluster mask for zstat{} ({})'.format( name=f'{self.__analysisName}: cluster mask '
self.__analysisName, f'for zstat{con + 1} ({conname})')
con + 1,
self.contrastNames()[con]))
return self.__clustMasks[con] return self.__clustMasks[con]
def getFClusterMask(self, ftest):
"""Returns the cluster mask image for the given f-test (0-indexed).
"""
if self.__fclustMasks[ftest] is None:
mfile = featanalysis.getFClusterMaskFile(self.__featDir, ftest)
self.__fclustMasks[ftest] = fslimage.Image(
mfile,
name=f'{self.__analysisName}: cluster mask '
f'for zfstat{ftest + 1}')
return self.__fclustMasks[ftest]
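A usage sketch for the new f-test accessors (path invented)::

    img = FEATImage('analysis.feat')

    for f in range(img.numFTests()):
        zf      = img.getZFStats(f)
        fmask   = img.getFClusterMask(f)
        fclusts = img.clusterResults(f, ftest=True)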
def fit(self, contrast, xyz): def fit(self, contrast, xyz):
"""Calculates the model fit for the given contrast vector """Calculates the model fit for the given contrast vector
at the given voxel. See the :func:`modelFit` function. at the given voxel. See the :func:`modelFit` function.
......
...@@ -16,18 +16,21 @@ ...@@ -16,18 +16,21 @@
""" """
import os.path as op import itertools as it
import math
import os.path as op
def loadLabelFile(filename, def loadLabelFile(filename,
includeLabel=None, includeLabel=None,
excludeLabel=None, excludeLabel=None,
returnIndices=False): returnIndices=False,
"""Loads component labels from the specified file. The file is assuemd missingLabel='Unknown',
returnProbabilities=False):
"""Loads component labels from the specified file. The file is assumed
to be of the format generated by FIX, Melview or ICA-AROMA; such a file to be of the format generated by FIX, Melview or ICA-AROMA; such a file
should have a structure resembling the following:: should have a structure resembling the following::
filtered_func_data.ica filtered_func_data.ica
1, Signal, False 1, Signal, False
2, Unclassified Noise, True 2, Unclassified Noise, True
...@@ -39,7 +42,6 @@ def loadLabelFile(filename, ...@@ -39,7 +42,6 @@ def loadLabelFile(filename,
8, Signal, False 8, Signal, False
[2, 5, 6, 7] [2, 5, 6, 7]
.. note:: This function will also parse files which only contain a .. note:: This function will also parse files which only contain a
component list, e.g.:: component list, e.g.::
...@@ -66,31 +68,46 @@ def loadLabelFile(filename, ...@@ -66,31 +68,46 @@ def loadLabelFile(filename,
- One or more labels for the component (multiple labels must be - One or more labels for the component (multiple labels must be
comma-separated). comma-separated).
- ``'True'`` if the component has been classified as *bad*, - ``'True'`` if the component has been classified as *bad*, ``'False'``
``'False'`` otherwise. This field is optional - if the last otherwise. This field is optional - if the last non-numeric
comma-separated token on a line is not equal (case-insensitive) comma-separated token on a line is not equal to ``True`` or ``False``
to ``True`` or ``False``, it is interpreted as a component label. (case-insensitive), it is interpreted as a component label.
- A value between 0 and 1, which gives the probability of the component
being signal, as generated by an automatic classifier (e.g. FIX). This
field is optional - it is output by some versions of FIX.
The last line of the file contains the index (starting from 1) of all The last line of the file contains the index (starting from 1) of all
*bad* components, i.e. those components which are not classified as *bad* components, i.e. those components which are not classified as
signal or unknown. signal or unknown.
:arg filename: Name of the label file to load. :arg filename: Name of the label file to load.
:arg includeLabel: If the file contains a single line containing a
list component indices, this label will be used
for the components in the list. Defaults to
``'Unclassified noise'`` for FIX-like files, and
``'Movement'`` for ICA-AROMA-like files.
:arg includeLabel: If the file contains a single line containing a list :arg excludeLabel: If the file contains a single line containing
component indices, this label will be used for the component indices, this label will be used for
components in the list. Defaults to 'Unclassified the components that are not in the list.
noise' for FIX-like files, and 'Movement' for Defaults to ``'Signal'`` for FIX-like files, and
ICA-AROMA-like files. ``'Unknown'`` for ICA-AROMA-like files.
:arg excludeLabel: If the file contains a single line containing component :arg returnIndices: Defaults to ``False``. If ``True``, a list
indices, this label will be used for the components containing the noisy component numbers that were
that are not in the list. Defaults to 'Signal' for listed in the file is returned.
FIX-like files, and 'Unknown' for ICA-AROMA-like files.
:arg returnIndices: Defaults to ``False``. If ``True``, a list containing :arg missingLabel: Label to use for any components which are not
the noisy component numbers that were listed in the present (only used for label files, not for noise
file is returned. component files).
:arg returnProbabilities: Defaults to ``False``. If ``True``, a list
containing the component classification
probabilities is returned. If the file does not
contain probabilities, every value in this list
will be nan.
:returns: A tuple containing: :returns: A tuple containing:
...@@ -102,72 +119,55 @@ def loadLabelFile(filename, ...@@ -102,72 +119,55 @@ def loadLabelFile(filename,
- If ``returnIndices is True``, a list of the noisy component - If ``returnIndices is True``, a list of the noisy component
indices (starting from 1) that were specified in the file. indices (starting from 1) that were specified in the file.
- If ``returnProbabilities is True``, a list of the component
classification probabilities that were specified in the
file (all nan if they are not in the file).
.. note:: Some label files generated by old versions of FIX/Melview do
not contain a line for every component (unknown/unlabelled
components may not be listed). For these files, and also for
files which only contain a component list, there is no way of
knowing how many components were in the data, so the returned
list may contain fewer entries than there are components.
""" """
signalLabels = None filename = op.abspath(filename)
filename = op.abspath(filename) probabilities = None
signalLabels = None
with open(filename, 'rt') as f: with open(filename, 'rt') as f:
lines = f.readlines() lines = f.readlines()
if len(lines) < 1: if len(lines) < 1:
raise InvalidLabelFileError('Invalid FIX classification ' raise InvalidLabelFileError(f'{filename}: Invalid FIX classification '
'file - not enough lines') 'file - not enough lines')
lines = [l.strip() for l in lines] lines = [l.strip() for l in lines]
lines = [l for l in lines if l != ''] lines = [l for l in lines if l != '']
# If the file contains a single # If the file contains one or two lines, we
# line, we assume that it is just # assume that it is just a comma-separated list
# a comma-separated list of noise # of noise components (possibly preceded by
# components. # the MELODIC directory path)
if len(lines) == 1: if len(lines) <= 2:
melDir, noisyComps, allLabels, signalLabels = \
line = lines[0] _parseSingleLineLabelFile(lines, includeLabel, excludeLabel)
probabilities = [math.nan] * len(allLabels)
# if the list is contained in
# square brackets, we assume
# that it is a FIX output file,
# where included components have
# been classified as noise, and
# excluded components as signal.
#
# Otherwise we assume that it
# is an AROMA file, where
# included components have
# been classified as being due
# to motion, and excluded
# components unclassified.
if includeLabel is None:
if line[0] == '[': includeLabel = 'Unclassified noise'
else: includeLabel = 'Movement'
if excludeLabel is None:
if line[0] == '[': excludeLabel = 'Signal'
else: excludeLabel = 'Unknown'
else:
signalLabels = [excludeLabel]
# Remove any leading/trailing
# whitespace or brackets.
line = lines[0].strip(' []')
melDir = None
noisyComps = [int(i) for i in line.split(',')]
allLabels = []
for i in range(max(noisyComps)):
if (i + 1) in noisyComps: allLabels.append([includeLabel])
else: allLabels.append([excludeLabel])
# Otherwise, we assume that
# it is a full label file.
else:
melDir = lines[0] # Otherwise, we assume that it is a full label file.
noisyComps = lines[-1].strip(' []').split(',') else:
noisyComps = [c for c in noisyComps if c != ''] melDir, noisyComps, allLabels, probabilities = \
noisyComps = [int(c) for c in noisyComps] _parseFullLabelFile(filename, lines, missingLabel)
# There's no way to validate
# the melodic directory path,
# but let's try anyway.
if melDir is not None:
if len(melDir.split(',')) >= 3:
raise InvalidLabelFileError(
f'{filename}: First line does not look like '
f'a MELODIC directory path: {melDir}')
# The melodic directory path should # The melodic directory path should
# either be an absolute path, or # either be an absolute path, or
...@@ -176,38 +176,6 @@ def loadLabelFile(filename, ...@@ -176,38 +176,6 @@ def loadLabelFile(filename,
if not op.isabs(melDir): if not op.isabs(melDir):
melDir = op.join(op.dirname(filename), melDir) melDir = op.join(op.dirname(filename), melDir)
# Parse the labels for every component
allLabels = []
for i, compLine in enumerate(lines[1:-1]):
tokens = compLine.split(',')
tokens = [t.strip() for t in tokens]
if len(tokens) < 3:
raise InvalidLabelFileError(
'Invalid FIX classification file - '
'line {}: {}'.format(i + 1, compLine))
try:
compIdx = int(tokens[0])
except ValueError:
raise InvalidLabelFileError(
'Invalid FIX classification file - '
'line {}: {}'.format(i + 1, compLine))
if tokens[-1].lower() in ('true', 'false'):
compLabels = tokens[1:-1]
else:
compLabels = tokens[1:]
if compIdx != i + 1:
raise InvalidLabelFileError(
'Invalid FIX classification file - wrong component '
'number at line {}: {}'.format(i + 1, compLine))
allLabels.append(compLabels)
# Validate the labels against # Validate the labels against
# the noisy list - all components # the noisy list - all components
# in the noisy list should not # in the noisy list should not
...@@ -218,8 +186,8 @@ def loadLabelFile(filename, ...@@ -218,8 +186,8 @@ def loadLabelFile(filename,
noise = isNoisyComponent(labels, signalLabels) noise = isNoisyComponent(labels, signalLabels)
if noise and (comp not in noisyComps): if noise and (comp not in noisyComps):
raise InvalidLabelFileError('Noisy component {} has invalid ' raise InvalidLabelFileError(f'{filename}: Noisy component {comp} '
'labels: {}'.format(comp, labels)) f'has invalid labels: {labels}')
for comp in noisyComps: for comp in noisyComps:
...@@ -228,44 +196,187 @@ def loadLabelFile(filename, ...@@ -228,44 +196,187 @@ def loadLabelFile(filename,
noise = isNoisyComponent(labels, signalLabels) noise = isNoisyComponent(labels, signalLabels)
if not noise: if not noise:
raise InvalidLabelFileError('Noisy component {} is missing ' raise InvalidLabelFileError(f'{filename}: Noisy component {comp} '
'a noise label'.format(comp)) 'is missing a noise label')
retval = [melDir, allLabels]
if returnIndices: return melDir, allLabels, noisyComps if returnIndices: retval.append(noisyComps)
else: return melDir, allLabels if returnProbabilities: retval.append(probabilities)
return tuple(retval)
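For example, loading a Melview-style file like the one shown above (file name invented), with the new probability support::

    melDir, labels, noisy, probs = loadLabelFile(
        'fix4melview_std2_thr20.txt',
        returnIndices=True,
        returnProbabilities=True)

    # labels is a list of label lists, one per component; probs has
    # one entry per component (all nan if the file stores none).
    for i, (lbls, p) in enumerate(zip(labels, probs)):
        print(i + 1, lbls, p)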
def _parseSingleLineLabelFile(lines, includeLabel, excludeLabel):
"""Called by :func:`loadLabelFile`. Parses the contents of an
ICA-AROMA-style label file which just contains a list of noise
components (and possibly the MELODIC directory path), e.g.::
filtered_func_data.ica
[2, 5, 6, 7]
"""
signalLabels = None
noisyComps = lines[-1]
if len(lines) == 2: melDir = lines[0]
else: melDir = None
# if the list is contained in
# square brackets, we assume
# that it is a FIX output file,
# where included components have
# been classified as noise, and
# excluded components as signal.
#
# Otherwise we assume that it
# is an AROMA file, where
# included components have
# been classified as being due
# to motion, and excluded
# components unclassified.
if includeLabel is None:
if noisyComps[0] == '[': includeLabel = 'Unclassified noise'
else: includeLabel = 'Movement'
if excludeLabel is None:
if noisyComps[0] == '[': excludeLabel = 'Signal'
else: excludeLabel = 'Unknown'
else:
signalLabels = [excludeLabel]
# Remove any leading/trailing
# whitespace or brackets.
noisyComps = noisyComps.strip(' []')
noisyComps = [int(i) for i in noisyComps.split(',')]
allLabels = []
for i in range(max(noisyComps)):
if (i + 1) in noisyComps: allLabels.append([includeLabel])
else: allLabels.append([excludeLabel])
return melDir, noisyComps, allLabels, signalLabels
def _parseFullLabelFile(filename, lines, missingLabel):
"""Called by :func:`loadLabelFile`. Parses the contents of a
FIX/Melview-style label file which contains labels for each component,
e.g.:
filtered_func_data.ica
1, Signal, False
2, Unclassified Noise, True
3, Unknown, False
4, Signal, False
5, Unclassified Noise, True
6, Unclassified Noise, True
7, Unclassified Noise, True
8, Signal, False
[2, 5, 6, 7]
"""
melDir = lines[0]
noisyComps = lines[-1].strip(' []').split(',')
noisyComps = [c for c in noisyComps if c != '']
noisyComps = [int(c) for c in noisyComps]
# Parse the labels for every component.
# Initially store as a {comp : ([labels], probability)} dict.
allLabels = {}
for i, compLine in enumerate(lines[1:-1]):
tokens = compLine.split(',')
tokens = [t.strip() for t in tokens]
if len(tokens) < 3:
raise InvalidLabelFileError(
f'{filename}: Invalid FIX classification '
f'file - line {i + 1}: {compLine}')
try:
compIdx = int(tokens[0])
if compIdx in allLabels:
raise ValueError()
except ValueError:
raise InvalidLabelFileError(
f'{filename}: Invalid FIX classification '
f'file - line {i + 1}: {compLine}')
tokens = tokens[1:]
probability = math.nan
# last token could be classification probability
if _isfloat(tokens[-1]):
probability = float(tokens[-1])
tokens = tokens[:-1]
# true/false is ignored as it is superfluous
if tokens[-1].lower() in ('true', 'false'):
tokens = tokens[:-1]
allLabels[compIdx] = tokens, probability
# Convert {comp : [labels]} into a list
# of lists, filling in missing components
allLabelsList = []
probabilities = []
for i in range(max(it.chain(allLabels.keys(), noisyComps))):
labels, prob = allLabels.get(i + 1, ([missingLabel], math.nan))
allLabelsList.append(labels)
probabilities.append(prob)
allLabels = allLabelsList
return melDir, noisyComps, allLabels, probabilities
def _isfloat(s):
"""Returns True if the given string appears to contain a floating
point number, False otherwise.
"""
try:
float(s)
return True
except Exception:
return False
def saveLabelFile(allLabels, def saveLabelFile(allLabels,
filename, filename,
dirname=None, dirname=None,
listBad=True, listBad=True,
signalLabels=None): signalLabels=None,
probabilities=None):
"""Saves the given classification labels to the specified file. The """Saves the given classification labels to the specified file. The
classifications are saved in the format described in the classifications are saved in the format described in the
:func:`loadLabelFile` method. :func:`loadLabelFile` method.
:arg allLabels: A list of lists, one list for each component, where :arg allLabels: A list of lists, one list for each component, where
each list contains the labels for the corresponding each list contains the labels for the corresponding
component. component.
:arg filename: Name of the file to which the labels should be saved. :arg filename: Name of the file to which the labels should be saved.
:arg dirname: If provided, is output as the first line of the file. :arg dirname: If provided, is output as the first line of the file.
Intended to be a relative path to the MELODIC analysis Intended to be a relative path to the MELODIC analysis
directory with which this label file is associated. If directory with which this label file is associated. If
not provided, a ``'.'`` is output as the first line. not provided, a ``'.'`` is output as the first line.
:arg listBad: If ``True`` (the default), the last line of the file :arg listBad: If ``True`` (the default), the last line of the file
will contain a comma separated list of components which will contain a comma separated list of components which
are deemed 'noisy' (see :func:`isNoisyComponent`). are deemed 'noisy' (see :func:`isNoisyComponent`).
:arg signalLabels: Labels which should be deemed 'signal' - see the :arg signalLabels: Labels which should be deemed 'signal' - see the
:func:`isNoisyComponent` function. :func:`isNoisyComponent` function.
:arg probabilities: Classification probabilities. If provided, the
probability for each component is saved to the file.
""" """
lines = [] lines = []
noisyComps = [] noisyComps = []
if probabilities is not None and len(probabilities) != len(allLabels):
raise ValueError('len(probabilities) != len(allLabels)')
# The first line - the melodic directory name # The first line - the melodic directory name
if dirname is None: if dirname is None:
dirname = '.' dirname = '.'
...@@ -283,6 +394,9 @@ def saveLabelFile(allLabels, ...@@ -283,6 +394,9 @@ def saveLabelFile(allLabels,
labels = [l.replace(',', '_') for l in labels] labels = [l.replace(',', '_') for l in labels]
tokens = [str(comp)] + labels + [str(noise)] tokens = [str(comp)] + labels + [str(noise)]
if probabilities is not None:
tokens.append(f'{probabilities[i]:0.6f}')
lines.append(', '.join(tokens)) lines.append(', '.join(tokens))
if noise: if noise:
...@@ -318,4 +432,3 @@ class InvalidLabelFileError(Exception): ...@@ -318,4 +432,3 @@ class InvalidLabelFileError(Exception):
"""Exception raised by the :func:`loadLabelFile` function when an attempt """Exception raised by the :func:`loadLabelFile` function when an attempt
is made to load an invalid label file. is made to load an invalid label file.
""" """
pass
...@@ -67,7 +67,8 @@ CORE_GEOMETRY_FILES = ['?h.orig', ...@@ -67,7 +67,8 @@ CORE_GEOMETRY_FILES = ['?h.orig',
'?h.pial', '?h.pial',
'?h.white', '?h.white',
'?h.inflated', '?h.inflated',
'?h.sphere'] '?h.sphere',
'?h.pial_semi_inflated']
"""File patterns for identifying the core Freesurfer geometry files. """ """File patterns for identifying the core Freesurfer geometry files. """
...@@ -76,7 +77,8 @@ CORE_GEOMETRY_DESCRIPTIONS = [ ...@@ -76,7 +77,8 @@ CORE_GEOMETRY_DESCRIPTIONS = [
"Freesurfer surface (pial)", "Freesurfer surface (pial)",
"Freesurfer surface (white matter)", "Freesurfer surface (white matter)",
"Freesurfer surface (inflated)", "Freesurfer surface (inflated)",
"Freesurfer surface (sphere)"] "Freesurfer surface (sphere)",
"Freesurfer surface (pial semi-inflated)"]
"""A description for each extension in :attr:`GEOMETRY_EXTENSIONS`. """ """A description for each extension in :attr:`GEOMETRY_EXTENSIONS`. """
......
...@@ -19,31 +19,43 @@ are available: ...@@ -19,31 +19,43 @@ are available:
GiftiMesh GiftiMesh
loadGiftiMesh loadGiftiMesh
loadGiftiVertexData loadGiftiVertexData
prepareGiftiVertexData
relatedFiles relatedFiles
""" """
import glob import glob
import re
import os.path as op import os.path as op
import numpy as np import numpy as np
import nibabel as nib import nibabel as nib
import fsl.utils.path as fslpath import fsl.utils.path as fslpath
import fsl.utils.bids as bids
import fsl.data.constants as constants import fsl.data.constants as constants
import fsl.data.mesh as fslmesh import fsl.data.mesh as fslmesh
ALLOWED_EXTENSIONS = ['.surf.gii'] ALLOWED_EXTENSIONS = ['.surf.gii', '.gii']
"""List of file extensions that a file containing Gifti surface data """List of file extensions that a file containing Gifti surface data
is expected to have. is expected to have.
""" """
EXTENSION_DESCRIPTIONS = ['GIFTI surface file'] EXTENSION_DESCRIPTIONS = ['GIFTI surface file', 'GIFTI file']
"""A description for each of the :data:`ALLOWED_EXTENSIONS`. """ """A description for each of the :data:`ALLOWED_EXTENSIONS`. """
VERTEX_DATA_EXTENSIONS = ['.func.gii',
'.shape.gii',
'.label.gii',
'.time.gii']
"""File suffixes which are interpreted as GIFTI vertex data files,
containing data values for every vertex in the mesh.
"""
class GiftiMesh(fslmesh.Mesh): class GiftiMesh(fslmesh.Mesh):
"""Class which represents a GIFTI surface image. This is essentially """Class which represents a GIFTI surface image. This is essentially
just a 3D model made of triangles. just a 3D model made of triangles.
...@@ -58,7 +70,13 @@ class GiftiMesh(fslmesh.Mesh): ...@@ -58,7 +70,13 @@ class GiftiMesh(fslmesh.Mesh):
"""Load the given GIFTI file using ``nibabel``, and extracts surface """Load the given GIFTI file using ``nibabel``, and extracts surface
data using the :func:`loadGiftiMesh` function. data using the :func:`loadGiftiMesh` function.
:arg infile: A GIFTI surface file (``*.surf.gii``). If the file contains more than one set of vertices, the additional
ones are added with keys of the form ``infile_i``, where ``infile`` is
the absolute path to the file, and ``i`` is an index number, starting
from 1. See the :meth:`.addVertices` method.
:arg infile: A GIFTI file (``*.gii``) which contains a surface
definition.
:arg fixWinding: Passed through to the :meth:`addVertices` method :arg fixWinding: Passed through to the :meth:`addVertices` method
for the first vertex set. for the first vertex set.
...@@ -74,34 +92,60 @@ class GiftiMesh(fslmesh.Mesh): ...@@ -74,34 +92,60 @@ class GiftiMesh(fslmesh.Mesh):
name = fslpath.removeExt(op.basename(infile), ALLOWED_EXTENSIONS) name = fslpath.removeExt(op.basename(infile), ALLOWED_EXTENSIONS)
infile = op.abspath(infile) infile = op.abspath(infile)
surfimg, vertices, indices = loadGiftiMesh(infile) surfimg, indices, vertices, vdata = loadGiftiMesh(infile)
fslmesh.Mesh.__init__(self, fslmesh.Mesh.__init__(self,
indices, indices,
name=name, name=name,
dataSource=infile) dataSource=infile)
self.addVertices(vertices, infile, fixWinding=fixWinding) for i, v in enumerate(vertices):
self.setMeta(infile, surfimg) if i == 0: key = infile
else: key = f'{infile}_{i}'
self.addVertices(v, key, select=(i == 0), fixWinding=fixWinding)
self.meta[infile] = surfimg
# Copy all metadata entries for the GIFTI image
for k, v in surfimg.meta.items():
self.meta[k] = v
# and also for each GIFTI data array - triangles
# are stored under "faces", and pointsets are
# stored under "vertices"/[0,1,2...] (as there may
# be multiple pointsets in a file)
self.meta['vertices'] = {}
for i, arr in enumerate(surfimg.darrays):
if arr.intent == constants.NIFTI_INTENT_POINTSET:
self.meta['vertices'][i] = dict(arr.meta)
elif arr.intent == constants.NIFTI_INTENT_TRIANGLE:
self.meta['faces'] = dict(arr.meta)
if vdata is not None:
self.addVertexData(infile, vdata)
# Find and load all other # Find and load all other
# surfaces in the same directory # surfaces in the same directory
# as the specified one. # as the specified one.
if loadAll: if loadAll:
nvertices = vertices.shape[0] # Only attempt to auto-load sensibly
surfFiles = relatedFiles(infile, ALLOWED_EXTENSIONS) # named gifti files (i.e. *.surf.gii,
# rather than *.gii).
surfFiles = relatedFiles(infile, [ALLOWED_EXTENSIONS[0]])
nvertices = vertices[0].shape[0]
for sfile in surfFiles: for sfile in surfFiles:
surfimg, vertices, _ = loadGiftiMesh(sfile) try:
surfimg, _, vertices, _ = loadGiftiMesh(sfile)
except Exception:
continue
if vertices.shape[0] != nvertices: if vertices[0].shape[0] != nvertices:
continue continue
self.addVertices(vertices, sfile, select=False) self.addVertices(vertices[0], sfile, select=False)
self.setMeta( sfile, surfimg) self.meta[sfile] = surfimg
def loadVertices(self, infile, key=None, *args, **kwargs): def loadVertices(self, infile, key=None, *args, **kwargs):
...@@ -121,11 +165,14 @@ class GiftiMesh(fslmesh.Mesh): ...@@ -121,11 +165,14 @@ class GiftiMesh(fslmesh.Mesh):
if key is None: if key is None:
key = infile key = infile
surfimg, vertices, _ = loadGiftiMesh(infile) surfimg, _, vertices, _ = loadGiftiMesh(infile)
vertices = self.addVertices(vertices, key, *args, **kwargs) for i, v in enumerate(vertices):
if i == 0: vkey = key
else: vkey = f'{key}_{i}'
vertices[i] = self.addVertices(v, vkey, *args, **kwargs)
self.setMeta(infile, surfimg) self.meta[infile] = surfimg
return vertices return vertices
...@@ -155,9 +202,10 @@ def loadGiftiMesh(filename): ...@@ -155,9 +202,10 @@ def loadGiftiMesh(filename):
The image is expected to contain the following``<DataArray>`` elements: The image is expected to contain the following``<DataArray>`` elements:
- one comprising ``NIFTI_INTENT_POINTSET`` data (the surface vertices)
- one comprising ``NIFTI_INTENT_TRIANGLE`` data (vertex indices - one comprising ``NIFTI_INTENT_TRIANGLE`` data (vertex indices
defining the triangles). defining the triangles).
- one or more comprising ``NIFTI_INTENT_POINTSET`` data (the surface
vertices)
A ``ValueError`` will be raised if this is not the case. A ``ValueError`` will be raised if this is not the case.
...@@ -167,42 +215,65 @@ def loadGiftiMesh(filename): ...@@ -167,42 +215,65 @@ def loadGiftiMesh(filename):
- The loaded ``nibabel.gifti.GiftiImage`` instance - The loaded ``nibabel.gifti.GiftiImage`` instance
- A ``(N, 3)`` array containing ``N`` vertices. - A ``(M, 3)`` array containing the vertex indices for
- A ``(M, 3))`` array containing the vertex indices for
``M`` triangles. ``M`` triangles.
- A list of at least one ``(N, 3)`` array containing ``N``
vertices.
- A ``(M, N)`` numpy array containing ``N`` data points for
``M`` vertices, or ``None`` if the file does not contain
any vertex data.
""" """
gimg = nib.load(filename) gimg = nib.load(filename)
pointsetCode = constants.NIFTI_INTENT_POINTSET pscode = constants.NIFTI_INTENT_POINTSET
triangleCode = constants.NIFTI_INTENT_TRIANGLE tricode = constants.NIFTI_INTENT_TRIANGLE
pointsets = [d for d in gimg.darrays if d.intent == pointsetCode] pointsets = [d for d in gimg.darrays if d.intent == pscode]
triangles = [d for d in gimg.darrays if d.intent == triangleCode] triangles = [d for d in gimg.darrays if d.intent == tricode]
vdata = [d for d in gimg.darrays if d.intent not in (pscode, tricode)]
if len(gimg.darrays) != 2: if len(triangles) != 1:
raise ValueError('{}: GIFTI surface files must contain ' raise ValueError(f'{filename}: GIFTI surface files must '
'exactly one pointset array and one ' 'contain exactly one triangle array')
'triangle array'.format(filename))
if len(pointsets) != 1: if len(pointsets) == 0:
raise ValueError('{}: GIFTI surface files must contain ' raise ValueError(f'{filename}: GIFTI surface files must '
'exactly one pointset array'.format(filename)) 'contain at least one pointset array')
if len(triangles) != 1: vertices = [ps.data for ps in pointsets]
raise ValueError('{}: GIFTI surface files must contain ' indices = np.atleast_2d(triangles[0].data)
'exactly one triangle array'.format(filename))
vertices = pointsets[0].data if len(vdata) == 0: vdata = None
indices = triangles[0].data else: vdata = prepareGiftiVertexData(vdata, filename)
return gimg, vertices, indices return gimg, indices, vertices, vdata
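A sketch of unpacking the return values of ``loadGiftiMesh`` (the file
name is hypothetical)::

    surfimg, indices, vertices, vdata = loadGiftiMesh('lh.white.surf.gii')

    print(indices.shape)      # (M, 3) triangle array
    print(vertices[0].shape)  # (N, 3) first (often only) pointset
    if vdata is not None:
        print(vdata.shape)    # per-vertex data, if the file contains any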
def loadGiftiVertexData(filename): def loadGiftiVertexData(filename):
"""Loads vertex data from the given GIFTI file. """Loads vertex data from the given GIFTI file.
See :func:`prepareGiftiVertexData`.
Returns a tuple containing:
- The loaded ``nibabel.gifti.GiftiImage`` object
- A ``(M, N)`` numpy array containing ``N`` data points for ``M``
vertices
"""
gimg = nib.load(filename)
return gimg, prepareGiftiVertexData(gimg.darrays, filename)
def prepareGiftiVertexData(darrays, filename=None):
"""Prepares vertex data from the given list of GIFTI data arrays.
All of the data arrays are concatenated into one ``(M, N)`` array,
containing ``N`` data points for ``M`` vertices.
It is assumed that the given file does not contain any It is assumed that the given file does not contain any
``NIFTI_INTENT_POINTSET`` or ``NIFTI_INTENT_TRIANGLE`` data arrays, and ``NIFTI_INTENT_POINTSET`` or ``NIFTI_INTENT_TRIANGLE`` data arrays, and
which contains either: which contains either:
...@@ -213,49 +284,42 @@ def loadGiftiVertexData(filename): ...@@ -213,49 +284,42 @@ def loadGiftiVertexData(filename):
- One or more ``(M, 1)`` data arrays each containing a single data point - One or more ``(M, 1)`` data arrays each containing a single data point
for ``M`` vertices, and all with the same intent code for ``M`` vertices, and all with the same intent code
Returns a tuple containing: Returns a ``(M, N)`` numpy array containing ``N`` data points for ``M``
vertices.
- The loaded ``nibabel.gifti.GiftiImage`` object
- A ``(M, N)`` numpy array containing ``N`` data points for ``M``
vertices
""" """
gimg = nib.load(filename) intents = {d.intent for d in darrays}
intents = set([d.intent for d in gimg.darrays])
if len(intents) != 1: if len(intents) != 1:
raise ValueError('{} contains multiple (or no) intents' raise ValueError(f'{filename} contains multiple '
': {}'.format(filename, intents)) f'(or no) intents: {intents}')
intent = intents.pop() intent = intents.pop()
if intent in (constants.NIFTI_INTENT_POINTSET, if intent in (constants.NIFTI_INTENT_POINTSET,
constants.NIFTI_INTENT_TRIANGLE): constants.NIFTI_INTENT_TRIANGLE):
raise ValueError(f'{filename} contains surface data')
raise ValueError('{} contains surface data'.format(filename))
# Just a single array - return it as-is. # Just a single array - return it as-is.
# n.b. Storing (M, N) data in a single # n.b. Storing (M, N) data in a single
# DataArray goes against the GIFTI spec, # DataArray goes against the GIFTI spec,
# but hey, it happens. # but hey, it happens.
if len(gimg.darrays) == 1: if len(darrays) == 1:
vdata = gimg.darrays[0].data vdata = darrays[0].data
return gimg, vdata.reshape(vdata.shape[0], -1) return vdata.reshape(vdata.shape[0], -1)
# Otherwise extract and concatenate # Otherwise extract and concatenate
# multiple 1-dimensional arrays # multiple 1-dimensional arrays
vdata = [d.data for d in gimg.darrays] vdata = [d.data for d in darrays]
if any([len(d.shape) != 1 for d in vdata]): if any([len(d.shape) != 1 for d in vdata]):
raise ValueError('{} contains one or more non-vector ' raise ValueError(f'{filename} contains one or '
'darrays'.format(filename)) 'more non-vector darrays')
vdata = np.vstack(vdata).T vdata = np.vstack(vdata).T
vdata = vdata.reshape(vdata.shape[0], -1) vdata = vdata.reshape(vdata.shape[0], -1)
return gimg, vdata return vdata
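A sketch of loading per-vertex data (hypothetical file name)::

    gimg, vdata = loadGiftiVertexData('lh.thickness.shape.gii')
    print(vdata.shape)  # (M, N) - N data points for each of M vertices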
def relatedFiles(fname, ftypes=None): def relatedFiles(fname, ftypes=None):
...@@ -263,45 +327,110 @@ def relatedFiles(fname, ftypes=None): ...@@ -263,45 +327,110 @@ def relatedFiles(fname, ftypes=None):
directory which appear to be related with the given one. Files which directory which appear to be related with the given one. Files which
share the same prefix are assumed to be related to the given file. share the same prefix are assumed to be related to the given file.
This function assumes that the GIFTI files are named according to a
standard convention - the following conventions are supported:
- HCP-style, i.e.: ``<subject>.<hemi>.<type>.<space>.<ftype>.gii``
- BIDS-style, i.e.:
``<source_prefix>_hemi-<hemi>[_space-<space>]*_<suffix>.<ftype>.gii``
If the files are not named according to one of these conventions, this
function will return an empty list.
:arg fname: Name of the file to search for related files for :arg fname: Name of the file to search for related files for
:arg ftypes: If provided, only files with suffixes in this list are :arg ftypes: If provided, only files with suffixes in this list are
searched for. Defaults to files which contain vertex data. searched for. Defaults to :attr:`VERTEX_DATA_EXTENSIONS`.
""" """
if ftypes is None: if ftypes is None:
ftypes = ['.func.gii', '.shape.gii', '.label.gii', '.time.gii'] ftypes = VERTEX_DATA_EXTENSIONS
# We want to return all files in the same path = op.abspath(fname)
# directory which have the following name: dirname, fname = op.split(path)
# We want to identify all files in the same
# directory which are associated with the
# given file. We assume that the files are
# named according to one of the following
# conventions:
# #
# [subj].[hemi].[type].*.[ftype] # - HCP style:
# <subject>.<hemi>.<type>.<space>.<ftype>.gii
# #
# - BIDS style:
# where # <source_prefix>_hemi-<hemi>[_space-<space>]*.<ftype>.gii
# - [subj] is the subject ID, and matches fname
# #
# - [hemi] is the hemisphere, and matches fname # We cannot assume consistent ordering of
# the entities (key-value pairs) within a
# BIDS style filename, so we cannot simply
# use a regular expression or glob pattern.
# Instead, for each style we define:
# #
# - [type] defines the file contents # - a "matcher" function, which tests
# whether the file matches the style,
# and returns the important elements
# from the file name.
# #
# - suffix is func, shape, label, time, or `ftype` # - a "searcher" function, which takes
# the elements of the input file
path = op.abspath(fname) # that were extracted by the matcher,
dirname, fname = op.split(path) # and searches for other related files
# get the [subj].[hemi] prefix # HCP style - extract "<subject>.<hemi>"
try: # and "<space>".
subj, hemi, _ = fname.split('.', 2) def matchhcp(f):
prefix = '.'.join((subj, hemi)) pat = r'^(.*\.[LR])\..*\.(.*)\..*\.gii$'
except Exception: match = re.match(pat, f)
if match:
return match.groups()
else:
return None
def searchhcp(match, ftype):
prefix, space = match
template = f'{prefix}.*.{space}{ftype}'
return glob.glob(op.join(dirname, template))
# BIDS style - extract all entities (kv
# pairs), ignoring specific irrelevant
# ones.
def matchbids(f):
try: match = bids.BIDSFile(f)
except ValueError: return None
match.entities.pop('desc', None)
return match
def searchbids(match, ftype):
allfiles = glob.glob(op.join(dirname, '*{}'.format(ftype)))
for f in allfiles:
try: bf = bids.BIDSFile(f)
except ValueError: continue
if bf.match(match, False):
yield f
# find the first style that matches
matchers = [matchhcp, matchbids]
searchers = [searchhcp, searchbids]
for matcher, searcher in zip(matchers, searchers):
match = matcher(fname)
if match:
break
# Give up if the file does
# not match any known style.
else:
return [] return []
# Build a list of files in the same
# directory and matching the template
related = [] related = []
for ftype in ftypes: for ftype in ftypes:
related.extend(
glob.glob(op.join(dirname, '{}*{}'.format(prefix, ftype))))
hits = searcher(match, ftype)
# eliminate dupes
related.extend([h for h in hits if h not in related])
# exclude the file itself
return [r for r in related if r != path] return [r for r in related if r != path]
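For example, given a hypothetical HCP-style surface file, ``relatedFiles``
searches the containing directory for vertex data files which share the
same subject/hemisphere prefix and space::

    surf = '/data/sub01/sub01.L.white.32k_fs_LR.surf.gii'

    # matches e.g. sub01.L.thickness.32k_fs_LR.shape.gii,
    # sub01.L.task.32k_fs_LR.func.gii, etc.
    vdataFiles = relatedFiles(surf)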
...@@ -32,29 +32,37 @@ and file names: ...@@ -32,29 +32,37 @@ and file names:
""" """
import os import os
import os.path as op import os.path as op
import shutil import itertools as it
import string import collections.abc as abc
import logging import enum
import tempfile import json
import warnings import string
import logging
import six import tempfile
import numpy as np
import scipy.ndimage as ndimage from pathlib import Path
from typing import Union
import numpy as np
import numpy.linalg as npla
import nibabel as nib import nibabel as nib
import nibabel.fileslice as fileslice import nibabel.fileslice as fileslice
import fsl.utils.meta as meta import fsl.utils.meta as meta
import fsl.utils.transform as transform import fsl.utils.deprecated as deprecated
import fsl.transform.affine as affine
import fsl.utils.notifier as notifier import fsl.utils.notifier as notifier
import fsl.utils.naninfrange as nir
import fsl.utils.memoize as memoize import fsl.utils.memoize as memoize
import fsl.utils.path as fslpath import fsl.utils.path as fslpath
import fsl.utils.deprecated as deprecated import fsl.utils.bids as fslbids
import fsl.data.constants as constants import fsl.data.constants as constants
import fsl.data.imagewrapper as imagewrapper
PathLike = Union[str, Path]
ImageSource = Union[PathLike, nib.Nifti1Image, np.ndarray, 'Image']
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -62,14 +70,14 @@ log = logging.getLogger(__name__) ...@@ -62,14 +70,14 @@ log = logging.getLogger(__name__)
ALLOWED_EXTENSIONS = ['.nii.gz', '.nii', '.img', '.hdr', '.img.gz', '.hdr.gz'] ALLOWED_EXTENSIONS = ['.nii.gz', '.nii', '.img', '.hdr', '.img.gz', '.hdr.gz']
"""The file extensions which we understand. This list is used as the default """The file extensions which we understand. This list is used as the default
if the ``allowedExts`` parameter is not passed to any of the functions if the ``allowedExts`` parameter is not passed to any of the ``*Ext``
below. functions, or the :func:`looksLikeImage` function.
""" """
EXTENSION_DESCRIPTIONS = ['Compressed NIFTI images', EXTENSION_DESCRIPTIONS = ['Compressed NIFTI images',
'NIFTI images', 'NIFTI images',
'ANALYZE75 images', 'NIFTI/ANALYZE75 images',
'NIFTI/ANALYZE75 headers', 'NIFTI/ANALYZE75 headers',
'Compressed NIFTI/ANALYZE75 images', 'Compressed NIFTI/ANALYZE75 images',
'Compressed NIFTI/ANALYZE75 headers'] 'Compressed NIFTI/ANALYZE75 headers']
...@@ -89,6 +97,47 @@ Made available in this module for convenience. ...@@ -89,6 +97,47 @@ Made available in this module for convenience.
""" """
class DataManager:
"""The ``DataManager`` defines an interface which may be used by
:class:`Image` instances for managing access and modification of
data in a ``nibabel.Nifti1Image`` image.
"""
def copy(self, nibImage : nib.Nifti1Image):
"""Return a copy of this ``DataManager``, associated with the
given ``nibImage``.
"""
raise NotImplementedError()
@property
def dataRange(self):
"""Return the image minimum/maximum data values as a ``(min, max)``
tuple.
"""
raise NotImplementedError()
@property
def editable(self):
"""Return ``True`` if the image data can be modified, ``False``
otherwise. The default implementation returns ``True``.
"""
return True
def __getitem__(self, slc):
"""Return data at ``slc``. """
raise NotImplementedError()
def __setitem__(self, slc, val):
"""Set data at ``slc`` to ``val``. """
raise NotImplementedError()
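A minimal sketch of a custom ``DataManager`` which serves data from an
in-memory ``numpy`` array (purely illustrative - a real implementation
might, for example, stream data from a remote store)::

    import numpy as np

    class ArrayDataManager(DataManager):
        """Toy manager which wraps an in-memory numpy array. """

        def __init__(self, data):
            self.__data = np.asanyarray(data)

        def copy(self, nibImage):
            # the array is independent of the nibabel image
            return ArrayDataManager(self.__data)

        @property
        def dataRange(self):
            return self.__data.min(), self.__data.max()

        def __getitem__(self, slc):
            return self.__data[slc]

        def __setitem__(self, slc, val):
            self.__data[slc] = val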
class Nifti(notifier.Notifier, meta.Meta): class Nifti(notifier.Notifier, meta.Meta):
"""The ``Nifti`` class is intended to be used as a base class for """The ``Nifti`` class is intended to be used as a base class for
things which either are, or are associated with, a NIFTI image. things which either are, or are associated with, a NIFTI image.
...@@ -108,6 +157,9 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -108,6 +157,9 @@ class Nifti(notifier.Notifier, meta.Meta):
``shape`` A list/tuple containing the number of voxels along ``shape`` A list/tuple containing the number of voxels along
each image dimension. each image dimension.
``realShape`` A list/tuple containing the actual image data shape
- see notes below.
``pixdim`` A list/tuple containing the length of one voxel ``pixdim`` A list/tuple containing the length of one voxel
along each image dimension. along each image dimension.
...@@ -131,19 +183,51 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -131,19 +183,51 @@ class Nifti(notifier.Notifier, meta.Meta):
property if you need to know what type of image you are dealing with. property if you need to know what type of image you are dealing with.
The ``shape`` attribute may not precisely match the image shape as **Image dimensionality**
reported in the NIFTI header, because trailing dimensions of size 1 are
squeezed out. See the :meth:`__determineShape` and :meth:`mapIndices`
methods. By default, the ``Nifti`` and ``Image`` classes "normalise" the
dimensionality of an image so that it always has at least 3 dimensions,
and so that trailing dimensions of length 1 are removed. Therefore, the
``shape`` attribute may not precisely match the image shape as reported in
the NIFTI header, because trailing dimensions of size 1 are squeezed
out. The actual image data shape can be queried via the :meth:`realShape`
property. Note also that the :class:`Image` class expects data
access/slicing to be with respect to the normalised shape, not the real
shape. See the :meth:`__determineShape` method and the
:func:`canonicalSliceObj` function for more details.
**Affine transformations**
The ``Nifti`` class is aware of three coordinate systems:
- The ``voxel`` coordinate system, used to access image data
**The affine transformation** - The ``world`` coordinate system, where voxel coordinates are
transformed into a millimetre coordinate system, defined by the
``sform`` and/or ``qform`` elements of the NIFTI header.
- The ``fsl`` coordinate system, where voxel coordinates are scaled by
the ``pixdim`` values in the NIFTI header, and the X axis is inverted
if the voxel-to-world affine has a positive determinant. The
coordinates ``(0, 0, 0)`` correspond to the corner of voxel
``(0, 0, 0)``.
The :meth:`voxToWorldMat` and :meth:`worldToVoxMat` attributes contain - The ``scaled`` coordinate system, where voxel coordinates are scaled by
transformation matrices for transforming between voxel and world the ``pixdim`` values in the NIFTI header.
coordinates. The ``Nifti`` class follows the same process as ``nibabel``
in selecting the affine (see The :meth:`getAffine` method is a simple means of acquiring an affine
which will transform between any of these coordinate systems.
See `here <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FLIRT/FAQ#What_is_the_format_of_the_matrix_used_by_FLIRT.2C_and_how_does_it_relate_to_the_transformation_parameters.3F>`_
for more details on the ``fsl`` coordinate system.
The ``Nifti`` class follows the same process as ``nibabel`` in determining
the ``voxel`` to ``world`` affine (see
http://nipy.org/nibabel/nifti_images.html#the-nifti-affines): http://nipy.org/nibabel/nifti_images.html#the-nifti-affines):
...@@ -176,7 +260,7 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -176,7 +260,7 @@ class Nifti(notifier.Notifier, meta.Meta):
A ``Nifti`` instance expects to be passed either a A ``Nifti`` instance expects to be passed either a
``nibabel.nifti1.Nifti1Header`` or a ``nibabel.nifti2.Nifti2Header``, but ``nibabel.nifti1.Nifti1Header`` or a ``nibabel.nifti2.Nifti2Header``, but
can als encapsulate a ``nibabel.analyze.AnalyzeHeader``. In this case: can also encapsulate a ``nibabel.analyze.AnalyzeHeader``. In this case:
- The image voxel orientation is assumed to be R->L, P->A, I->S. - The image voxel orientation is assumed to be R->L, P->A, I->S.
...@@ -206,18 +290,20 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -206,18 +290,20 @@ class Nifti(notifier.Notifier, meta.Meta):
=============== ======================================================== =============== ========================================================
``'transform'`` The affine transformation matrix has changed. This topic ``'transform'`` The affine transformation matrix has changed. This topic
will occur when the ``voxToWorldMat`` is changed. will occur when the :meth:`voxToWorldMat` is changed.
``'header'`` A header field has changed. This will occur when the
:meth:`intent` is changed.
=============== ======================================================== =============== ========================================================
""" """ # noqa
def __init__(self, header): def __init__(self, header):
"""Create a ``Nifti`` object. """Create a ``Nifti`` object.
:arg header: A :class:`nibabel.nifti1.Nifti1Header`, :arg header: A :class:`nibabel.nifti1.Nifti1Header`,
:class:`nibabel.nifti2.Nifti2Header`, or :class:`nibabel.nifti2.Nifti2Header`, or
``nibabel.analyze.AnalyzeHeader`` to be used as the ``nibabel.analyze.AnalyzeHeader`` to be used as the
image header. image header.
""" """
# Nifti2Header is a sub-class of Nifti1Header, # Nifti2Header is a sub-class of Nifti1Header,
...@@ -226,23 +312,64 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -226,23 +312,64 @@ class Nifti(notifier.Notifier, meta.Meta):
if not isinstance(header, nib.analyze.AnalyzeHeader): if not isinstance(header, nib.analyze.AnalyzeHeader):
raise ValueError('Unrecognised header: {}'.format(header)) raise ValueError('Unrecognised header: {}'.format(header))
header = header origShape, shape, pixdim = Nifti.determineShape(header)
origShape, shape, pixdim = self.__determineShape(header) voxToWorldMat = Nifti.determineAffine(header)
affines, isneuro = Nifti.generateAffines(voxToWorldMat,
shape,
pixdim)
self.__header = header
self.__shape = shape
self.__origShape = origShape
self.__pixdim = pixdim
self.__affines = affines
self.__isNeurological = isneuro
def __del__(self):
"""Clears the reference to the ``nibabel`` header object. """
self.__header = None
voxToWorldMat = self.__determineTransform(header) @staticmethod
worldToVoxMat = transform.invert(voxToWorldMat) def determineShape(header):
"""This method is called by :meth:`__init__`. It figures out the actual
shape of the image data, and the zooms/pixdims for each data axis. Any
empty trailing dimensions are squeezed, but the returned shape is
guaranteed to be at least 3 dimensions. Returns:
- A sequence/tuple containing the image shape, as reported in the
header.
- A sequence/tuple containing the effective image shape.
- A sequence/tuple containing the zooms/pixdims.
"""
# The canonicalShape function figures out
# the data shape that we should use.
origShape = list(header.get_data_shape())
shape = canonicalShape(origShape)
pixdims = list(header.get_zooms())
# if get_zooms() doesn't return at
# least len(shape) values, we'll
# fall back to the pixdim field in
# the header.
if len(pixdims) < len(shape):
pixdims = header['pixdim'][1:]
pixdims = pixdims[:len(shape)]
self.header = header # should never happen, but if we only
self.__shape = shape # have zoom values for the original
self.__intent = header.get('intent_code', # (< 3D) shape, pad them with 1s.
constants.NIFTI_INTENT_NONE) if len(pixdims) < len(shape):
self.__origShape = origShape pixdims = pixdims + [1] * (len(shape) - len(pixdims))
self.__pixdim = pixdim
self.__voxToWorldMat = voxToWorldMat return origShape, shape, pixdims
self.__worldToVoxMat = worldToVoxMat
def __determineTransform(self, header): @staticmethod
def determineAffine(header):
"""Called by :meth:`__init__`. Figures out the voxel-to-world """Called by :meth:`__init__`. Figures out the voxel-to-world
coordinate transformation matrix that is associated with this coordinate transformation matrix that is associated with this
``Nifti`` instance. ``Nifti`` instance.
...@@ -252,33 +379,40 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -252,33 +379,40 @@ class Nifti(notifier.Notifier, meta.Meta):
# specially, as FNIRT clobbers the # specially, as FNIRT clobbers the
# sform section of the NIFTI header # sform section of the NIFTI header
# to store other data. # to store other data.
# intent = header.get('intent_code', -1)
# TODO Nibabel <= 2.1.0 has a bug in header.get qform = header.get('qform_code', -1)
# for fields with a value of 0. When this sform = header.get('sform_code', -1)
# bug gets fixed, we can replace the if-else
# block below with this: # FNIRT non-linear coefficient files
# # clobber the sform/qform/intent
# intent = header.get('intent_code', -1) # and pixdims of the nifti header,
# qform = header.get('qform_code', -1) # so we can't correctly place it in
# sform = header.get('sform_code', -1) # the world coordinate system. See
# # $FSLDIR/src/fnirt/fnirt_file_writer.cpp
# TODO Change this in fslpy 2.0.0 # and fsl.transform.nonlinear for more
# # details.
if isinstance(header, nib.nifti1.Nifti1Header): if intent in (constants.FSL_DCT_COEFFICIENTS,
intent = header['intent_code']
qform = header['qform_code']
sform = header['sform_code']
else:
intent = -1
qform = -1
sform = -1
if intent in (constants.FSL_FNIRT_DISPLACEMENT_FIELD,
constants.FSL_CUBIC_SPLINE_COEFFICIENTS, constants.FSL_CUBIC_SPLINE_COEFFICIENTS,
constants.FSL_DCT_COEFFICIENTS, constants.FSL_QUADRATIC_SPLINE_COEFFICIENTS,
constants.FSL_QUADRATIC_SPLINE_COEFFICIENTS): constants.FSL_TOPUP_CUBIC_SPLINE_COEFFICIENTS,
log.debug('FNIRT output image detected - using qform matrix') constants.FSL_TOPUP_QUADRATIC_SPLINE_COEFFICIENTS):
voxToWorldMat = np.array(header.get_qform())
log.debug('FNIRT coefficient field detected - generating affine')
# Knot spacing is stored in the pixdims
# (specified in terms of reference image
# voxels), and reference image pixdims
# are stored as intent code parameters.
# If we combine the two, we can at least
# get the shape/size of the coefficient
# field about right
knotpix = header.get_zooms()[:3]
refpix = (header.get('intent_p1', 1) or 1,
header.get('intent_p2', 1) or 1,
header.get('intent_p3', 1) or 1)
voxToWorldMat = affine.concat(
affine.scaleOffsetXform(refpix, 0),
affine.scaleOffsetXform(knotpix, 0))
# If the qform or sform codes are unknown, # If the qform or sform codes are unknown,
# then we can't assume that the transform # then we can't assume that the transform
...@@ -292,7 +426,7 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -292,7 +426,7 @@ class Nifti(notifier.Notifier, meta.Meta):
# should just be a straight scaling matrix. # should just be a straight scaling matrix.
elif qform == 0 and sform == 0: elif qform == 0 and sform == 0:
pixdims = header.get_zooms() pixdims = header.get_zooms()
voxToWorldMat = transform.scaleOffsetXform(pixdims, 0) voxToWorldMat = affine.scaleOffsetXform(pixdims, 0)
# Otherwise we let nibabel decide # Otherwise we let nibabel decide
# which transform to use. # which transform to use.
...@@ -302,40 +436,119 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -302,40 +436,119 @@ class Nifti(notifier.Notifier, meta.Meta):
return voxToWorldMat return voxToWorldMat
def __determineShape(self, header): @staticmethod
"""This method is called by :meth:`__init__`. It figures out the actual def generateAffines(voxToWorldMat, shape, pixdim):
shape of the image data, and the zooms/pixdims for each data axis. Any """Called by :meth:`__init__`, and the :meth:`voxToWorldMat` setter.
empty trailing dimensions are squeezed, but the returned shape is Generates and returns a dictionary containing affine transformations
guaranteed to be at least 3 dimensions. Returns: between the ``voxel``, ``fsl``, ``scaled``, and ``world`` coordinate
systems. These affines are accessible via the :meth:`getAffine`
method.
:arg voxToWorldMat: The voxel-to-world affine transformation
:arg shape: Image shape (number of voxels along each dimension)
:arg pixdim: Image pixdims (size of one voxel along each
dimension)
:returns: A tuple containing:
- a dictionary of affine transformations between
each pair of coordinate systems
- ``True`` if the image is to be considered
"neurological", ``False`` otherwise - see the
:meth:`isNeurological` method.
"""
- A sequence/tuple containing the image shape, as reported in the affines = {}
header. shape = list(shape[ :3])
- A sequence/tuple containing the effective image shape. pixdim = list(pixdim[:3])
- A sequence/tuple containing the zooms/pixdims. voxToScaledMat = np.diag(pixdim + [1.0])
voxToFSLMat = np.array(voxToScaledMat)
isneuro = npla.det(voxToWorldMat) > 0
if isneuro:
x = (shape[0] - 1) * pixdim[0]
flip = affine.scaleOffsetXform([-1, 1, 1],
[ x, 0, 0])
voxToFSLMat = affine.concat(flip, voxToFSLMat)
affines['voxel', 'voxel'] = np.eye(4)
affines['voxel', 'scaled'] = voxToScaledMat
affines['voxel', 'fsl'] = voxToFSLMat
affines['voxel', 'world'] = voxToWorldMat
affines['scaled', 'scaled'] = np.eye(4)
affines['scaled', 'voxel'] = affine.invert(voxToScaledMat)
affines['scaled', 'fsl'] = affine.concat(affines['voxel', 'fsl'],
affines['scaled', 'voxel'])
affines['scaled', 'world'] = affine.concat(affines['voxel', 'world'],
affines['scaled', 'voxel'])
affines['fsl', 'fsl'] = np.eye(4)
affines['fsl', 'voxel'] = affine.invert(voxToFSLMat)
affines['fsl', 'scaled'] = affine.invert(affines['scaled', 'fsl'])
affines['fsl', 'world'] = affine.concat(affines['voxel', 'world'],
affines['fsl', 'voxel'])
affines['world', 'world'] = np.eye(4)
affines['world', 'voxel'] = affine.invert(voxToWorldMat)
affines['world', 'scaled'] = affine.concat(affines['voxel', 'scaled'],
affines['world', 'voxel'])
affines['world', 'fsl'] = affine.concat(affines['voxel', 'fsl'],
affines['world', 'voxel'])
return affines, isneuro
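A sketch of generating the affine dictionary directly, for a hypothetical
2mm-isotropic image (the affine has a positive determinant, so the image
is treated as neurological, and the ``fsl`` affine includes a left-right
flip)::

    import numpy as np

    v2w = np.diag([2., 2., 2., 1.])
    affines, isneuro = Nifti.generateAffines(v2w, (91, 109, 91), (2, 2, 2))

    # isneuro is True; affines is keyed
    # by (from, to) pairs, e.g.:
    fsl2world = affines['fsl', 'world']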
@staticmethod
def identifyAffine(image, xform, from_=None, to=None):
"""Attempt to identify the source or destination space for the given
affine.
``xform`` is assumed to be an affine transformation which can be used
to transform coordinates between two coordinate systems associated with
``image``.
If one of ``from_`` or ``to`` is provided, the other will be derived.
If neither is provided, both will be derived. See the
:meth:`.Nifti.getAffine` method for details on the valid values that
``from_`` and ``to`` may take.
:arg image: :class:`.Nifti` instance associated with the affine.
:arg xform: ``(4, 4)`` ``numpy`` array encoding an affine
transformation
:arg from_: Label specifying the coordinate system which ``xform``
takes as input
:arg to: Label specifying the coordinate system which ``xform``
produces as output
:returns: A tuple containing:
- A label for the ``from_`` coordinate system
- A label for the ``to`` coordinate system
""" """
# The canonicalShape function figures out if (from_ is not None) and (to is not None):
# the data shape that we should use. return from_, to
origShape = list(header.get_data_shape())
shape = canonicalShape(origShape)
pixdims = list(header.get_zooms())
# if get_zooms() doesn't return at if from_ is not None: froms = [from_]
# least len(shape) values, we'll else: froms = ['voxel', 'scaled', 'fsl', 'world']
# fall back to the pixdim field in if to is not None: tos = [to]
# the header. else: tos = ['voxel', 'scaled', 'fsl', 'world']
if len(pixdims) < len(shape):
pixdims = header['pixdim'][1:]
pixdims = pixdims[:len(shape)] for from_, to in it.product(froms, tos):
return origShape, shape, pixdims candidate = image.getAffine(from_, to)
if np.all(np.isclose(candidate, xform)):
return from_, to
raise ValueError('Could not identify affine')
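A sketch, assuming ``img`` is an existing ``Image``/``Nifti`` object::

    xform = img.getAffine('voxel', 'fsl')
    from_, to = Nifti.identifyAffine(img, xform)
    # -> ('voxel', 'fsl'), provided the candidate
    # affines are distinguishable from each other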
def strval(self, key): def strval(self, key):
"""Returns the specified NIFTI header field, converted to a python """Returns the specified NIFTI header field, converted to a python
string, correctly null-terminated, and with non-printable characters string, with non-printable characters removed.
removed.
This method is used to sanitise some NIFTI header fields. The default This method is used to sanitise some NIFTI header fields. The default
Python behaviour for converting a sequence of bytes to a string is to Python behaviour for converting a sequence of bytes to a string is to
...@@ -354,9 +567,28 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -354,9 +567,28 @@ class Nifti(notifier.Notifier, meta.Meta):
try: val = bytes(val).partition(b'\0')[0] try: val = bytes(val).partition(b'\0')[0]
except Exception: val = bytes(val) except Exception: val = bytes(val)
val = val.decode('ascii') val = [chr(c) for c in val]
val = ''.join(c for c in val if c in string.printable).strip()
return ''.join([c for c in val if c in string.printable]).strip() return val
@property
def header(self):
"""Return a reference to the ``nibabel`` header object. """
return self.__header
@header.setter
def header(self, header):
"""Replace the ``nibabel`` header object managed by this ``Nifti``
with a new header. The new header must have the same dimensions,
voxel size, and orientation as the old one.
"""
new = Nifti(header)
if not (self.sameSpace(new) and self.ndim == new.ndim):
raise ValueError('Incompatible header')
self.__header = header
@property @property
...@@ -375,15 +607,36 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -375,15 +607,36 @@ class Nifti(notifier.Notifier, meta.Meta):
elif isinstance(self.header, nib.nifti1.Nifti1Header): return 1 elif isinstance(self.header, nib.nifti1.Nifti1Header): return 1
elif isinstance(self.header, nib.analyze.AnalyzeHeader): return 0 elif isinstance(self.header, nib.analyze.AnalyzeHeader): return 0
else: raise RuntimeError('Unrecognised header: {}'.format(self.header)) else: raise RuntimeError(f'Unrecognised header: {self.header}')
@property @property
def shape(self): def shape(self):
"""Returns a tuple containing the image data shape. """ """Returns a tuple containing the normalised image data shape. The
image shape is at least three dimensions, and trailing dimensions of
length 1 are squeezed out.
"""
return tuple(self.__shape) return tuple(self.__shape)
@property
def realShape(self):
"""Returns a tuple containing the image data shape, as reported in
the NIfTI image header.
"""
return tuple(self.__origShape)
@property
def ndim(self):
"""Returns the number of dimensions in this image. This number may not
match the number of dimensions specified in the NIFTI header, as
trailing dimensions of length 1 are ignored. But it is guaranteed to be
at least 3.
"""
return len(self.__shape)
@property @property
def pixdim(self): def pixdim(self):
"""Returns a tuple containing the image pixdims (voxel sizes).""" """Returns a tuple containing the image pixdims (voxel sizes)."""
...@@ -392,9 +645,58 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -392,9 +645,58 @@ class Nifti(notifier.Notifier, meta.Meta):
@property @property
def intent(self): def intent(self):
"""Returns the NIFTI intent code of this image. """Returns the NIFTI intent code of this image. """
return self.header.get('intent_code', constants.NIFTI_INTENT_NONE)
@property
def niftiDataType(self):
"""Returns the NIFTI data type code of this image. """
dt = self.header.get('datatype', constants.NIFTI_DT_UNKNOWN)
return int(dt)
@property
def niftiDataTypeSize(self):
"""Returns the number of bits per voxel, according to the NIfTI
data type. Returns ``None`` if the data type is not recognised.
""" """
return self.__intent sizes = {
constants.NIFTI_DT_BINARY : 1,
constants.NIFTI_DT_UNSIGNED_CHAR : 8,
constants.NIFTI_DT_SIGNED_SHORT : 16,
constants.NIFTI_DT_SIGNED_INT : 32,
constants.NIFTI_DT_FLOAT : 32,
constants.NIFTI_DT_COMPLEX : 64,
constants.NIFTI_DT_DOUBLE : 64,
constants.NIFTI_DT_RGB : 24,
constants.NIFTI_DT_UINT8 : 8,
constants.NIFTI_DT_INT16 : 16,
constants.NIFTI_DT_INT32 : 32,
constants.NIFTI_DT_FLOAT32 : 32,
constants.NIFTI_DT_COMPLEX64 : 64,
constants.NIFTI_DT_FLOAT64 : 64,
constants.NIFTI_DT_RGB24 : 24,
constants.NIFTI_DT_INT8 : 8,
constants.NIFTI_DT_UINT16 : 16,
constants.NIFTI_DT_UINT32 : 32,
constants.NIFTI_DT_INT64 : 64,
constants.NIFTI_DT_UINT64 : 64,
constants.NIFTI_DT_FLOAT128 : 128,
constants.NIFTI_DT_COMPLEX128 : 128,
constants.NIFTI_DT_COMPLEX256 : 256,
constants.NIFTI_DT_RGBA32 : 32}
return sizes.get(self.niftiDataType, None)
@intent.setter
def intent(self, val):
"""Sets the NIFTI intent code of this image. """
# analyze has no intent
if (self.niftiVersion > 0) and (val != self.intent):
self.header.set_intent(val, allow_unknown=True)
self.notify(topic='header')
@property @property
...@@ -428,12 +730,42 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -428,12 +730,42 @@ class Nifti(notifier.Notifier, meta.Meta):
return units return units
def getAffine(self, from_, to):
"""Return an affine transformation which can be used to transform
coordinates from ``from_`` to ``to``.
Valid values for the ``from_`` and ``to`` arguments are:
- ``'voxel'``: The voxel coordinate system
- ``'world'``: The world coordinate system, as defined by the image
sform/qform
- ``'fsl'``: The FSL coordinate system (scaled voxels, with a
left-right flip if the sform/qform has a positive determinant)
- ``'scaled'``: Scaled voxel coordinate system (equivalent to
``'fsl'`` without the flip).
:arg from_: Source coordinate system
:arg to: Destination coordinate system
:returns: A ``numpy`` array of shape ``(4, 4)``
"""
from_ = from_.lower()
to = to .lower()
if from_ not in ('voxel', 'scaled', 'fsl', 'world') or \
to not in ('voxel', 'scaled', 'fsl', 'world'):
raise ValueError('Invalid source/reference spaces: "{}" -> "{}". '
'Recognised spaces are "voxel", "fsl", "scaled", '
'and "world"'.format(from_, to))
return np.copy(self.__affines[from_, to])
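For example, to transform a voxel coordinate into world coordinates (the
file name is hypothetical)::

    img = Image('image.nii.gz')
    vox2world = img.getAffine('voxel', 'world')
    xyz = affine.transform([10, 20, 30], vox2world)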
@property @property
def worldToVoxMat(self): def worldToVoxMat(self):
"""Returns a ``numpy`` array of shape ``(4, 4)`` containing an """Returns a ``numpy`` array of shape ``(4, 4)`` containing an
affine transformation from world coordinates to voxel coordinates. affine transformation from world coordinates to voxel coordinates.
""" """
return np.array(self.__worldToVoxMat) return self.getAffine('world', 'voxel')
@property @property
...@@ -441,7 +773,7 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -441,7 +773,7 @@ class Nifti(notifier.Notifier, meta.Meta):
"""Returns a ``numpy`` array of shape ``(4, 4)`` containing an """Returns a ``numpy`` array of shape ``(4, 4)`` containing an
affine transformation from voxel coordinates to world coordinates. affine transformation from voxel coordinates to world coordinates.
""" """
return np.array(self.__voxToWorldMat) return self.getAffine('voxel', 'world')
@voxToWorldMat.setter @voxToWorldMat.setter
...@@ -465,8 +797,12 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -465,8 +797,12 @@ class Nifti(notifier.Notifier, meta.Meta):
header.set_sform(xform, code=sformCode) header.set_sform(xform, code=sformCode)
self.__voxToWorldMat = self.__determineTransform(header) affines, isneuro = Nifti.generateAffines(xform,
self.__worldToVoxMat = transform.invert(self.__voxToWorldMat) self.shape,
self.pixdim)
self.__affines = affines
self.__isNeurological = isneuro
log.debug('Affine changed:\npixdims: ' log.debug('Affine changed:\npixdims: '
'%s\nsform: %s\nqform: %s', '%s\nsform: %s\nqform: %s',
...@@ -477,29 +813,34 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -477,29 +813,34 @@ class Nifti(notifier.Notifier, meta.Meta):
self.notify(topic='transform') self.notify(topic='transform')
def mapIndices(self, sliceobj): @property
"""Adjusts the given slice object so that it may be used to index the @deprecated.deprecated('3.22.0', '4.0.0', 'Use getAffine instead')
underlying ``nibabel`` NIFTI image object. def voxToScaledVoxMat(self):
"""Returns a transformation matrix which transforms from ``voxel``
See the :meth:`__determineShape` method. coordinates into ``fsl`` coordinates, with a left-right flip
if the image appears to be stored in neurological order.
:arg sliceobj: Something that can be used to slice a See http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FLIRT/FAQ#What_is_the\
multi-dimensional array, e.g. ``arr[sliceobj]``. _format_of_the_matrix_used_by_FLIRT.2C_and_how_does_it_relate_to\
_the_transformation_parameters.3F
""" """
return self.getAffine('voxel', 'fsl')
# How convenient - nibabel has a function
# that does the dirty work for us.
return fileslice.canonical_slicers(sliceobj, self.__origShape)
@property @property
def ndim(self): @deprecated.deprecated('3.22.0', '4.0.0', 'Use getAffine instead')
"""Returns the number of dimensions in this image. This number may not def scaledVoxToVoxMat(self):
match the number of dimensions specified in the NIFTI header, as """Returns a transformation matrix which transforms from ``fsl``
trailing dimensions of length 1 are ignored. But it is guaranteed to be coordinates into ``voxel`` coordinates, the inverse of the
at least 3. :meth:`voxToScaledVoxMat` transform.
""" """
return len(self.__shape) return self.getAffine('fsl', 'voxel')
@deprecated.deprecated('3.9.0', '4.0.0', 'Use canonicalSliceObj instead')
def mapIndices(self, sliceobj):
"""Deprecated - use :func:`canonicalSliceObj` instead. """
return fileslice.canonical_slicers(sliceobj, self.__origShape)
def getXFormCode(self, code=None): def getXFormCode(self, code=None):
...@@ -515,6 +856,7 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -515,6 +856,7 @@ class Nifti(notifier.Notifier, meta.Meta):
- :data:`~.constants.NIFTI_XFORM_ALIGNED_ANAT` - :data:`~.constants.NIFTI_XFORM_ALIGNED_ANAT`
- :data:`~.constants.NIFTI_XFORM_TALAIRACH` - :data:`~.constants.NIFTI_XFORM_TALAIRACH`
- :data:`~.constants.NIFTI_XFORM_MNI_152` - :data:`~.constants.NIFTI_XFORM_MNI_152`
- :data:`~.constants.NIFTI_XFORM_TEMPLATE_OTHER`
- :data:`~.constants.NIFTI_XFORM_ANALYZE` - :data:`~.constants.NIFTI_XFORM_ANALYZE`
""" """
...@@ -542,12 +884,14 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -542,12 +884,14 @@ class Nifti(notifier.Notifier, meta.Meta):
if sform_code != constants.NIFTI_XFORM_UNKNOWN: code = sform_code if sform_code != constants.NIFTI_XFORM_UNKNOWN: code = sform_code
elif qform_code != constants.NIFTI_XFORM_UNKNOWN: code = qform_code elif qform_code != constants.NIFTI_XFORM_UNKNOWN: code = qform_code
# Invalid values # Invalid code (the maximum NIFTI_XFORM_*
if code not in range(5): # code value is 5 at present)
if code not in range(6):
code = constants.NIFTI_XFORM_UNKNOWN code = constants.NIFTI_XFORM_UNKNOWN
return int(code) return int(code)
# TODO Check what has worse performance - hashing # TODO Check what has worse performance - hashing
# a 4x4 array (via memoizeMD5), or the call # a 4x4 array (via memoizeMD5), or the call
# to aff2axcodes. I'm guessing the latter, # to aff2axcodes. I'm guessing the latter,
...@@ -565,7 +909,6 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -565,7 +909,6 @@ class Nifti(notifier.Notifier, meta.Meta):
return nib.orientations.aff2axcodes(xform, inaxes) return nib.orientations.aff2axcodes(xform, inaxes)
@memoize.Instanceify(memoize.memoize)
def isNeurological(self): def isNeurological(self):
"""Returns ``True`` if it looks like this ``Nifti`` object has a """Returns ``True`` if it looks like this ``Nifti`` object has a
neurological voxel orientation, ``False`` otherwise. This test is neurological voxel orientation, ``False`` otherwise. This test is
...@@ -584,51 +927,7 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -584,51 +927,7 @@ class Nifti(notifier.Notifier, meta.Meta):
_format_of_the_matrix_used_by_FLIRT.2C_and_how_does_it_relate_to\ _format_of_the_matrix_used_by_FLIRT.2C_and_how_does_it_relate_to\
_the_transformation_parameters.3F _the_transformation_parameters.3F
""" """
import numpy.linalg as npla return self.__isNeurological
return npla.det(self.__voxToWorldMat) > 0
@property
def voxToScaledVoxMat(self):
"""Returns a transformation matrix which transforms from voxel
coordinates into scaled voxel coordinates, with a left-right flip
if the image appears to be stored in neurological order.
See http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FLIRT/FAQ#What_is_the\
_format_of_the_matrix_used_by_FLIRT.2C_and_how_does_it_relate_to\
_the_transformation_parameters.3F
"""
return self.__voxToScaledVoxMat()
@memoize.Instanceify(memoize.memoize)
def __voxToScaledVoxMat(self):
"""See :meth:`voxToScaledVoxMat`. """
shape = list(self.shape[ :3])
pixdim = list(self.pixdim[:3])
voxToPixdimMat = np.diag(pixdim + [1.0])
if self.isNeurological():
x = (shape[0] - 1) * pixdim[0]
flip = transform.scaleOffsetXform([-1, 1, 1], [x, 0, 0])
voxToPixdimMat = transform.concat(flip, voxToPixdimMat)
return voxToPixdimMat
@property
def scaledVoxToVoxMat(self):
"""Returns a transformation matrix which transforms from scaled voxels
into voxels, the inverse of the :meth:`voxToScaledVoxMat` transform.
"""
return self.__scaledVoxToVoxMat()
@memoize.Instanceify(memoize.memoize)
def __scaledVoxToVoxMat(self):
"""See :meth:`scaledVoxToVoxMat`. """
return transform.invert(self.voxToScaledVoxMat)
def sameSpace(self, other): def sameSpace(self, other):
...@@ -690,11 +989,79 @@ class Nifti(notifier.Notifier, meta.Meta): ...@@ -690,11 +989,79 @@ class Nifti(notifier.Notifier, meta.Meta):
return code return code
def adjust(self, pixdim=None, shape=None, origin=None):
"""Return a new ``Nifti`` object with the specified ``pixdim`` or
``shape``. The affine of the new ``Nifti`` is adjusted accordingly.
Only one of ``pixdim`` or ``shape`` can be specified.
See :func:`.affine.rescale` for the meaning of the ``origin`` argument.
Only the spatial dimensions may be adjusted - use the functions in
the :mod:`.image.resample` module if you need to adjust non-spatial
dimensions.
:arg pixdim: New voxel dimensions
:arg shape: New image shape
:arg origin: Voxel grid alignment - either ``'centre'`` (the default)
or ``'corner'``
:returns: A new ``Nifti`` object based on this one, with adjusted
pixdims, shape and affine.
"""
if ((pixdim is not None) and (shape is not None)) or \
((pixdim is None) and (shape is None)):
raise ValueError('Exactly one of pixdim or '
'shape must be specified')
if shape is not None: ndim = len(shape)
else: ndim = len(pixdim)
# We only allow adjustment of
# the spatial dimensions
if ndim != 3:
raise ValueError('Three dimensions must be specified')
oldShape = np.array(self.shape[ :ndim])
oldPixdim = np.array(self.pixdim[:ndim])
newShape = shape
newPixdim = pixdim
# if pixdims were specified,
# convert them into a shape,
# and vice versa
if newPixdim is not None:
newShape = oldShape * (oldPixdim / newPixdim)
else:
newPixdim = oldPixdim * (oldShape / newShape)
# Rescale the voxel-to-world affine
xform = affine.rescale(oldShape, newShape, origin)
xform = affine.concat(self.getAffine('voxel', 'world'), xform)
# Now that we've got our spatial
# scaling/offset matrix, pad the
# new shape/pixdims with those
# from any non-spatial dimensions
newShape = list(newShape) + list(self.shape[ 3:])
newPixdim = list(newPixdim) + list(self.pixdim[3:])
# And create the new header
# and we're away
header = self.header.copy()
header.set_data_shape(newShape)
header.set_zooms( newPixdim)
header.set_sform( xform)
header.set_qform( xform)
return Nifti(header)
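A sketch, assuming ``img`` is an existing 1mm-resolution image -
``adjust`` returns a header-only ``Nifti`` describing the same field of
view at the new resolution::

    hdr2mm = img.adjust(pixdim=(2, 2, 2))
    print(hdr2mm.shape, hdr2mm.pixdim)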
class Image(Nifti): class Image(Nifti):
"""Class which represents a NIFTI image. Internally, the image is """Class which represents a NIFTI image. Internally, the image is
loaded/stored using a :mod:`nibabel.nifti1.Nifti1Image` or loaded/stored using a :mod:`nibabel.nifti1.Nifti1Image` or
:mod:`nibabel.nifti2.Nifti2Image`, and data access managed by a :mod:`nibabel.nifti2.Nifti2Image`. This class adds functionality for
:class:`.ImageWrapper`. loading metadata from JSON sidecar files, and for keeping track of
modifications to the image data.
In addition to the attributes added by the :meth:`Nifti.__init__` method, In addition to the attributes added by the :meth:`Nifti.__init__` method,
...@@ -710,20 +1077,78 @@ class Image(Nifti): ...@@ -710,20 +1077,78 @@ class Image(Nifti):
file from where it was loaded, or some other string file from where it was loaded, or some other string
describing its origin. describing its origin.
``dataRange`` The ``(min, max)`` image data values.
``nibImage`` A reference to the ``nibabel`` NIFTI image object. ``nibImage`` A reference to the ``nibabel`` NIFTI image object.
``saveState`` A boolean value which is ``True`` if this image is ``saveState`` A boolean value which is ``True`` if this image is
saved to disk, ``False`` if it is in-memory, or has saved to disk, ``False`` if it is in-memory, or has
been edited. been edited.
``dataRange`` The minimum/maximum values in the image. Depending upon
the value of the ``calcRange`` parameter to
:meth:`__init__`, this may be calculated when the ``Image``
is created, or may be incrementally updated as more image
data is loaded from disk.
============== =========================================================== ============== ===========================================================
*Data access*
The ``Image`` class supports access to and assignment of the image data
via the ``[]`` slice operator, e.g.::
img = Image('image.nii.gz')
val = img[20, 30, 25]
img[30, 40, 20] = 999
Internally, the image data is managed using one of the following methods:
1. For read-only access, the ``Image`` class delegates to the
underlying ``nibabel.Nifti1Image`` instance, accessing the data
via the ``Nifti1Image.dataobj`` attribute. Refer to
https://nipy.org/nibabel/nibabel_images.html#the-image-data-array for
more details.
2. As soon as any data is modified, the ``Image`` class will
load the image data as a numpy array into memory and will maintain its
own reference to the array for subsequent access. Note that this
array is entirely independent of any array that is cached by
the underlying ``nibabel.Nifti1Image`` object (refer to
https://nipy.org/nibabel/images_and_memory.html)
3. The ``nibabel.Nifti1Image`` class does not support indexing of image
data with boolean mask arrays (e.g. ``image[mask > 0]``). If an
attempt is made to access image data in this way, the ``Image`` class
will be loaded into memory in the same way as described in point 2
above.
4. For more complicated requirements, a :class:`DataManager`,
implementing custom data access management logic, can be provided when
an ``Image`` is created. If a ``DataManager`` is provided, an
internal reference to the data (see 2 above) will **not** be created or
maintained.
It is also possible to obtain a reference to a numpy array containing
the image data via the :meth:`data` method. However, modifications to
the returned array:
- will not result in any notifications (described below)
- will not affect the value of :meth:`saveState`
- have undefined semantics when a custom :class:`DataManager` is in use
*Image dimensionality*
The ``Image`` class abstracts away trailing image dimensions of length 1.
This means that if the header for a NIFTI image specifies that the image
has four dimensions, but the fourth dimension is of length 1, you do not
need to worry about indexing that fourth dimension. However, all NIFTI
images will be presented as having at least three dimensions, so if your
image header specifies a third dimension of length 1, you will still
need to provide an index of 0 for that dimension, for all data accesses.
*Notification of changes to an Image*
The ``Image`` class adds some :class:`.Notifier` topics to those which are The ``Image`` class adds some :class:`.Notifier` topics to those which are
already provided by the :class:`Nifti` class - listeners may register to already provided by the :class:`Nifti` class - listeners may register to
be notified of changes to the above properties, by registering on the be notified of changes to the above properties, by registering on the
...@@ -741,28 +1166,31 @@ class Image(Nifti): ...@@ -741,28 +1166,31 @@ class Image(Nifti):
image changes (i.e. data or ``voxToWorldMat`` is image changes (i.e. data or ``voxToWorldMat`` is
edited, or the image saved to disk). edited, or the image saved to disk).
``'dataRange'`` This topic is notified whenever the image data range ``'dataRange'`` Deprecated - No notifications are made on this topic.
is changed/adjusted.
=============== ====================================================== =============== ======================================================
""" """
def __init__(self, def __init__(self,
image, image : ImageSource = None,
name=None, name : str = None,
header=None, header : nib.Nifti1Header = None,
xform=None, xform : np.ndarray = None,
loadData=True, loadData : bool = None,
calcRange=True, calcRange : bool = None,
indexed=False, threaded : bool = None,
threaded=False, dataSource : PathLike = None,
dataSource=None, loadMeta : bool = False,
dataMgr : DataManager = None,
version : int = None,
**kwargs): **kwargs):
"""Create an ``Image`` object with the given image data or file name. """Create an ``Image`` object with the given image data or file name.
:arg image: A string containing the name of an image file to load, :arg image: A string containing the name of an image file to load,
or a :mod:`numpy` array, or a :mod:`nibabel` image or a Path object pointing to an image file, or a
object. :mod:`numpy` array, or a :mod:`nibabel` image object,
or an ``Image`` object. If not provided, a ``header``
and a ``dataMgr`` must be provided.
:arg name: A name for the image. :arg name: A name for the image.
...@@ -781,44 +1209,51 @@ class Image(Nifti): ...@@ -781,44 +1209,51 @@ class Image(Nifti):
``header`` are provided, the ``xform`` is used in ``header`` are provided, the ``xform`` is used in
preference to the header transformation. preference to the header transformation.
:arg loadData: If ``True`` (the default) the image data is loaded :arg loadData: Deprecated, has no effect
in to memory. Otherwise, only the image header
information is read, and the image data is kept
from disk. In either case, the image data is
accessed through an :class:`.ImageWrapper` instance.
The data may be loaded into memory later on via the
:meth:`loadData` method.
:arg calcRange: If ``True`` (the default), the image range is
calculated immediately (vi a call to
:meth:`calcRange`). Otherwise, the image range is
incrementally updated as more data is read from memory
or disk.
:arg indexed: Deprecated. Has no effect, and will be removed in :arg calcRange: Deprecated, has no effect
``fslpy`` 3.0.
:arg threaded: If ``True``, the :class:`.ImageWrapper` will use a :arg threaded: Deprecated, has no effect
separate thread for data range calculation. Defaults
to ``False``. Ignored if ``loadData`` is ``True``.
:arg dataSource: If ``image`` is not a file name, this argument may be :arg dataSource: If ``image`` is not a file name, this argument may be
used to specify the file from which the image was used to specify the file from which the image was
loaded. loaded.
:arg loadMeta: If ``True``, any metadata contained in JSON sidecar
files is loaded and attached to this ``Image`` via
the :class:`.Meta` interface. if ``False``, metadata
can be loaded at a later stage via the
:func:`loadMeta` function. Defaults to ``False``.
:arg dataMgr: Object implementing the :class:`DataManager`
interface, for managing access to the image data.
:arg version: NIfTI version - either 1 or 2. Only used when creating
an image from a numpy array, and when a ``header`` is
not provided. Defaults to the value dictated by the
``FSLOUTPUTTYPE`` environment variable.
All other arguments are passed through to the ``nibabel.load`` function All other arguments are passed through to the ``nibabel.load`` function
(if it is called). (if it is called).
""" """
nibImage = None if threaded is not None:
deprecated.warn('Image(threaded)', vin='3.9.0', rin='4.0.0',
msg='The threaded option has no effect')
if loadData is not None:
deprecated.warn('Image(loadData)', vin='3.9.0', rin='4.0.0',
msg='The loadData option has no effect')
if calcRange is not None:
deprecated.warn('Image(calcRange)', vin='3.9.0', rin='4.0.0',
msg='The calcRange option has no effect')
if indexed is not False: if version not in (None, 1, 2):
warnings.warn('The indexed argument is deprecated ' raise ValueError('Invalid value for version - only NIfTI '
'and has no effect', 'versions 1 and 2 are supported')
category=DeprecationWarning)
if loadData: nibImage = None
threaded = False saved = False
# Take a copy of the header if one has # Take a copy of the header if one has
# been provided # been provided
...@@ -841,11 +1276,14 @@ class Image(Nifti): ...@@ -841,11 +1276,14 @@ class Image(Nifti):
header.set_qform(xform, code=qform) header.set_qform(xform, code=qform)
# The image parameter may be the name of an image file # The image parameter may be the name of an image file
if isinstance(image, six.string_types): if isinstance(image, (str, Path)):
# resolve path to source if it is a sym-link
image = Path(image).resolve()
image = op.abspath(addExt(image)) image = op.abspath(addExt(image))
nibImage = nib.load(image, **kwargs) nibImage = nib.load(image, **kwargs)
header = nibImage.header
dataSource = image dataSource = image
saved = True
# Or a numpy array - we wrap it in a nibabel image, # Or a numpy array - we wrap it in a nibabel image,
# with an identity transformation (each voxel maps # with an identity transformation (each voxel maps
...@@ -856,11 +1294,15 @@ class Image(Nifti): ...@@ -856,11 +1294,15 @@ class Image(Nifti):
if header is not None: xform = header.get_best_affine() if header is not None: xform = header.get_best_affine()
else: xform = np.identity(4) else: xform = np.identity(4)
# We default to NIFTI1 and not # NIfTI1 or NIfTI2 - if version was provided,
# NIFTI2, because the rest of # use that, otherwise use the FSLOUTPUTTYPE
# FSL is not yet NIFTI2 compatible. # environment variable
if header is None: if header is None:
ctr = nib.nifti1.Nifti1Image outputType = defaultOutputType()
if version == 2: ctr = nib.Nifti2Image
elif version == 1: ctr = nib.Nifti1Image
elif 'NIFTI2' in outputType.name: ctr = nib.Nifti2Image
else: ctr = nib.Nifti1Image
# make sure that the data type is correct, # make sure that the data type is correct,
# in case this header was passed in from # in case this header was passed in from
...@@ -878,18 +1320,36 @@ class Image(Nifti): ...@@ -878,18 +1320,36 @@ class Image(Nifti):
ctr = nib.analyze.AnalyzeImage ctr = nib.analyze.AnalyzeImage
nibImage = ctr(image, xform, header=header) nibImage = ctr(image, xform, header=header)
header = nibImage.header
# otherwise, we assume that it is a nibabel image # If it's an Image object, we
else: # just take the nibabel image
elif isinstance(image, Image):
nibImage = image.nibImage
header = image.header
# otherwise, we assume that
# it is a nibabel image
elif image is not None:
nibImage = image nibImage = image
header = nibImage.header
# If an image is not provided,
# a DataManager must be provided
elif dataMgr is None:
raise ValueError('A DataManager must be provided '
'if an image is not provided')
if header is None:
raise ValueError('Could not identify/create NIfTI header')
# Figure out the name of this image, if # Figure out the name of this image, if
# it has not beenbeen explicitly passed in # it has not been explicitly passed in
if name is None: if name is None:
# If this image was loaded # If this image was loaded
# from disk, use the file name. # from disk, use the file name.
if isinstance(image, six.string_types): if isinstance(image, (str, Path)):
name = removeExt(op.basename(image)) name = removeExt(op.basename(image))
# Or the image was created from a numpy array # Or the image was created from a numpy array
...@@ -900,28 +1360,31 @@ class Image(Nifti): ...@@ -900,28 +1360,31 @@ class Image(Nifti):
else: else:
name = 'Nibabel image' name = 'Nibabel image'
Nifti.__init__(self, nibImage.header) Nifti.__init__(self, header)
self.name = name self.name = name
self.__lName = '{}_{}'.format(id(self), self.name) self.__lName = f'{id(self)}_{self.name}'
self.__dataSource = dataSource self.__dataSource = dataSource
self.__threaded = threaded self.__nibImage = nibImage
self.__nibImage = nibImage self.__saveState = saved
self.__saveState = dataSource is not None self.__dataMgr = dataMgr
self.__imageWrapper = imagewrapper.ImageWrapper(self.nibImage, self.__dataRange = None
self.name, self.__data = None
loadData=loadData,
threaded=threaded)
# Listen to ourself for changes # Listen to ourself for changes
# to the voxToWorldMat, so we # to header attributes so we
# can update the saveState. # can update the saveState.
self.register(self.name, self.__transformChanged, topic='transform') self.register(self.name, self.__headerChanged, topic='transform')
self.register(self.name, self.__headerChanged, topic='header')
if calcRange: # try and load metadata
self.calcRange() # from JSON sidecar files
if self.dataSource is not None and loadMeta:
self.__imageWrapper.register(self.__lName, self.__dataRangeChanged) try:
self.updateMeta(loadMetadata(self))
except Exception as e:
log.warning('Failed to load metadata for %s: %s',
self.dataSource, e)
def __hash__(self): def __hash__(self):
...@@ -946,16 +1409,30 @@ class Image(Nifti): ...@@ -946,16 +1409,30 @@ class Image(Nifti):
def __del__(self): def __del__(self):
"""Closes any open file handles, and clears some references. """ """Closes any open file handles, and clears some references. """
self.header = None # Nifti class may have
self.__nibImage = None # been GC'd at shutdown
self.__imageWrapper = None if Nifti is not None:
Nifti.__del__(self)
self.__nibImage = None
self.__dataMgr = None
self.__data = None
@deprecated.deprecated('3.9.0', '4.0.0',
'The Image class no longer uses an ImageWrapper')
def getImageWrapper(self): def getImageWrapper(self):
"""Returns the :class:`.ImageWrapper` instance used to manage """Returns the :class:`.ImageWrapper` instance used to manage
access to the image data. access to the image data.
""" """
return self.__imageWrapper return None
@property
def dataManager(self):
"""Return the :class:`.DataManager` associated with this ``Image``,
if one was specified when it was created.
"""
return self.__dataMgr
@property @property
...@@ -969,19 +1446,49 @@ class Image(Nifti): ...@@ -969,19 +1446,49 @@ class Image(Nifti):
@property @property
def nibImage(self): def nibImage(self):
"""Returns a reference to the ``nibabel`` NIFTI image instance. """Returns a reference to the ``nibabel`` NIFTI image instance.
Note that if the image data has been modified through this ``Image``,
it will be out of sync with what is returned by the ``nibabel`` object,
until a call to :meth:`save` is made.
""" """
return self.__nibImage return self.__nibImage
@property @property
def data(self): def data(self):
"""Returns the image data as a ``numpy`` array. """Returns the image data as a ``numpy`` array. The shape of the
returned array is normalised - it will have at least three dimensions,
and any trailing dimensions of length 1 will be squeezed out. See
:meth:`Nifti.shape` and :meth:`Nifti.realShape`.
.. warning:: Calling this method will cause the entire image to be .. warning:: Calling this method may cause the entire image to be
loaded into memory. loaded into memory.
""" """
self.__imageWrapper.loadData()
return self[:] if self.__dataMgr is not None:
return self[:]
# The internal cache has real image shape -
# this makes code in __getitem__ easier,
# as it can use the real shape regardless
# of how the data is accessed
if self.__data is None:
self.__data = self.nibImage.dataobj[:]
return self.__data.reshape(self.shape)
@property
def inMemory(self):
"""Returns ``True`` if the image data has been loaded into memory,
``False`` otherwise. This does not reflect whether the underlying
``nibabel.Nifti1Image`` object has loaded the image data into
memory. However, if all data access takes place through the ``Image``
class, the underlying ``nibabel`` image will not use a cache.
If a custom ``DataManager`` is in use, this method will always
return ``False``.
"""
return self.__data is not None
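A sketch of this lazy behaviour (it writes a scratch file, with a hypothetical name, to the current directory)::

    import numpy as np
    import fsl.data.image as fslimage

    fslimage.Image(np.zeros((5, 5, 5))).save('scratch.nii.gz')

    img = fslimage.Image('scratch.nii.gz')
    print(img.inMemory)  # False - only the header has been read
    val = img[2, 2, 2]   # single reads go through nibabel's dataobj
    print(img.inMemory)  # False
    arr = img.data       # loads and caches the full array
    print(img.inMemory)  # True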
@property @property
...@@ -994,26 +1501,13 @@ class Image(Nifti): ...@@ -994,26 +1501,13 @@ class Image(Nifti):
@property @property
def dataRange(self): def dataRange(self):
"""Returns the image data range as a ``(min, max)`` tuple. If the """Returns the minimum/maxmimum image data values. """
``calcRange`` parameter to :meth:`__init__` was ``False``, these
values may not be accurate, and may change as more image data is
accessed.
If the data range has not been no data has been accessed,
``(None, None)`` is returned.
"""
if self.__imageWrapper is None: drange = (None, None)
else: drange = self.__imageWrapper.dataRange
# Fall back to the cal_min/max if self.__dataMgr is not None: return self.__dataMgr.dataRange
# fields in the NIFTI header elif self.__dataRange is not None: return self.__dataRange
# if we don't yet know anything
# about the image data range.
if drange[0] is None or drange[1] is None:
drange = (float(self.header['cal_min']),
float(self.header['cal_max']))
return drange self.__dataRange = nir.naninfrange(self.data)
return self.__dataRange
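For example, the range is computed on first access and recalculated after a write::

    import numpy as np
    import fsl.data.image as fslimage

    img = fslimage.Image(np.arange(27, dtype=np.float32).reshape(3, 3, 3))
    print(img.dataRange)  # (0.0, 26.0) - calculated on first access
    img[0, 0, 0] = -50
    print(img.dataRange)  # (-50.0, 26.0) - recalculated after the write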
@property @property
...@@ -1022,10 +1516,40 @@ class Image(Nifti): ...@@ -1022,10 +1516,40 @@ class Image(Nifti):
# Get the data type from the # Get the data type from the
# first voxel in the image # first voxel in the image
coords = [0] * len(self.__nibImage.shape) coords = [0] * len(self.shape)
return self[tuple(coords)].dtype return self[tuple(coords)].dtype
@property
def nvals(self):
"""Returns the number of values per voxel in this image. This will
usually be 1, but may be 3 or 4, for images of type
``NIFTI_TYPE_RGB24`` or ``NIFTI_TYPE_RGBA32``.
"""
nvals = len(self.dtype)
if nvals == 0: return 1
else: return nvals
@property
def iscomplex(self):
"""Returns ``True`` if this image has a complex data type, ``False``
otherwise.
"""
return np.issubdtype(self.dtype, np.complexfloating)
@property
def editable(self):
"""Return ``True`` if the image data can be modified, ``False``
otherwise. Delegates to :meth:`DataManager.editable` if a
:class:`DataManager` is in use.
"""
if self.__dataMgr is not None: return self.__dataMgr.editable
else: return True
@Nifti.voxToWorldMat.setter @Nifti.voxToWorldMat.setter
def voxToWorldMat(self, xform): def voxToWorldMat(self, xform):
"""Overrides the :meth:`Nifti.voxToWorldMat` property setter. """Overrides the :meth:`Nifti.voxToWorldMat` property setter.
...@@ -1037,14 +1561,17 @@ class Image(Nifti): ...@@ -1037,14 +1561,17 @@ class Image(Nifti):
Nifti.voxToWorldMat.fset(self, xform) Nifti.voxToWorldMat.fset(self, xform)
if self.__nibImage is None:
return
xform = self.voxToWorldMat xform = self.voxToWorldMat
code = int(self.header['sform_code']) code = int(self.header['sform_code'])
self.__nibImage.set_sform(xform, code) self.__nibImage.set_sform(xform, code)
def __transformChanged(self, *args, **kwargs): def __headerChanged(self, *args, **kwargs):
"""Called when the ``voxToWorldMat`` of this :class:`Nifti` instance """Called when header properties of this :class:`Nifti` instance
changes. Updates the :attr:`saveState` accordingly. change. Updates the :attr:`saveState` accordingly.
""" """
if self.__saveState: if self.__saveState:
...@@ -1052,63 +1579,34 @@ class Image(Nifti): ...@@ -1052,63 +1579,34 @@ class Image(Nifti):
self.notify(topic='saveState') self.notify(topic='saveState')
def __dataRangeChanged(self, *args, **kwargs): @deprecated.deprecated('3.9.0', '4.0.0', 'calcRange has no effect')
"""Called when the :class:`.ImageWrapper` data range changes. def calcRange(self, *args, **kwargs):
Notifies any listeners of this ``Image`` (registered through the """Deprecated, has no effect """
:class:`.Notifier` interface) on the ``'dataRange'`` topic.
"""
self.notify(topic='dataRange')
def calcRange(self, sizethres=None):
"""Forces calculation of the image data range.
:arg sizethres: If not ``None``, specifies an image size threshold
(total number of bytes). If the number of bytes in
the image is greater than this threshold, the range
is calculated on a sample (the first volume for a
4D image, or slice for a 3D image).
"""
# The ImageWrapper automatically calculates
# the range of the specified slice, whenever
# it gets indexed. All we have to do is
# access a portion of the data to trigger the
# range calculation.
nbytes = np.prod(self.shape) * self.dtype.itemsize
# If an image size threshold has not been specified,
# then we'll calculate the full data range right now.
if sizethres is None or nbytes < sizethres:
log.debug('%s: Forcing calculation of full '
'data range', self.name)
self.__imageWrapper[:]
else:
log.debug('%s: Calculating data range '
'from sample', self.name)
# Otherwise if the number of values in the
# image is bigger than the size threshold,
# we'll calculate the range from a sample:
self.__imageWrapper[..., 0]
@deprecated.deprecated('3.9.0', '4.0.0', 'loadData has no effect')
def loadData(self): def loadData(self):
"""Makes sure that the image data is loaded into memory. """Deprecated, has no effect """
See :meth:`.ImageWrapper.loadData`.
"""
self.__imageWrapper.loadData()
def save(self, filename=None): def save(self, filename=None):
"""Saves this ``Image`` to the specifed file, or the :attr:`dataSource` """Saves this ``Image`` to the specifed file, or the :attr:`dataSource`
if ``filename`` is ``None``. if ``filename`` is ``None``.
Note that calling ``save`` on an image with modified data will cause
the entire image data to be loaded into memory if it has not already
been loaded.
""" """
import fsl.utils.imcp as imcp
if self.__dataSource is None and filename is None: if self.__dataSource is None and filename is None:
raise ValueError('A file name must be specified') raise ValueError('A file name must be specified')
if self.__nibImage is None:
raise ValueError('Cannot save images without an '
'associated nibabel object')
if filename is None: if filename is None:
filename = self.__dataSource filename = self.__dataSource
...@@ -1123,41 +1621,46 @@ class Image(Nifti): ...@@ -1123,41 +1621,46 @@ class Image(Nifti):
# We save the image out to a temp file, # We save the image out to a temp file,
# then close the old image, move the # then close the old image, move the
# temp file to the real destination, # temp file to the real destination,
# then re-open the file. # then re-open the file. This is done
tmphd, tmpfname = tempfile.mkstemp(suffix=op.splitext(filename)[1]) # to ensure that all references to the
# old file are destroyed.
tmphd, tmpfname = tempfile.mkstemp(suffix=getExt(filename))
os.close(tmphd) os.close(tmphd)
try: try:
nib.save(self.__nibImage, tmpfname) # First of all, the nibabel object won't know
# about any image data modifications, so if
# any have occurred, we need to create a new
# nibabel image using our copy of the data,
# and the old header.
#
# Assuming here that analyze/nifti1/nifti2
# nibabel classes have an __init__ which
# expects (data, affine, header)
if not self.saveState:
self.__nibImage = type(self.__nibImage)(self.data,
affine=None,
header=self.header)
self.header = self.__nibImage.header
# nibabel should close any old nib.save(self.__nibImage, tmpfname)
# file handles when the image/
# header refs are deleted
self.__nibImage = None
self.header = None
shutil.copy(tmpfname, filename) # Copy to final destination,
# and reload from there
imcp.imcp(tmpfname, filename, overwrite=True)
self.__nibImage = nib.load(filename) self.__nibImage = nib.load(filename)
self.header = self.__nibImage.header self.header = self.__nibImage.header
finally: finally:
os.remove(tmpfname) os.remove(tmpfname)
raise
# Because we've created a new nibabel image, # Because we've created a new nibabel image,
# we have to create a new ImageWrapper # we may have to create a new DataManager
# instance too, as we have just destroyed # instance too, as we have just destroyed
# the nibabel image we gave to the last # the nibabel image we gave to the last one.
# one. if self.__dataMgr is not None:
self.__imageWrapper.deregister(self.__lName) self.__dataMgr = self.__dataMgr.copy(self.nibImage)
self.__imageWrapper = imagewrapper.ImageWrapper(
self.nibImage,
self.name,
loadData=False,
dataRange=self.dataRange,
threaded=self.__threaded)
self.__imageWrapper.register(self.__lName, self.__dataRangeChanged)
self.__dataSource = filename self.__dataSource = filename
self.__saveState = True self.__saveState = True
...@@ -1165,138 +1668,136 @@ class Image(Nifti): ...@@ -1165,138 +1668,136 @@ class Image(Nifti):
self.notify(topic='saveState') self.notify(topic='saveState')
def resample(self, def __getitem__(self, slc):
newShape, """Access the image data with the specified ``slc``.
sliceobj=None,
dtype=None,
order=1,
smooth=True):
"""Returns a copy of the data in this ``Image``, resampled to the
specified ``shape``.
:arg newShape: Desired shape. May containg floating point values,
in which case the resampled image will have shape
``round(newShape)``, but the voxel sizes will
have scales ``self.shape / newShape``.
:arg sliceobj: Slice into this ``Image``. If ``None``, the whole :arg slc: Something which can slice the image data.
image is resampled, and it is assumed that it has the
same number of dimensions as ``shape``. A
:exc:`ValueError` is raised if this is not the case.
:arg dtype: ``numpy`` data type of the resampled data. If ``None``,
the :meth:`dtype` of this ``Image`` is used.
:arg order: Spline interpolation order, passed through to the
``scipy.ndimage.affine_transform`` function - ``0``
corresponds to nearest neighbour interpolation, ``1``
(the default) to linear interpolation, and ``3`` to
cubic interpolation.
:arg smooth: If ``True`` (the default), the data is smoothed before
being resampled, but only along axes which are being
down-sampled (i.e. where
``newShape[i] < self.shape[i]``).
:returns: A tuple containing:
- A ``numpy`` array of shape ``shape``, containing an
interpolated copy of the data in this ``Image``.
- A ``numpy`` array of shape ``(4, 4)``, containing the
adjusted voxel-to-world transformation for the resampled
data.
""" """
if sliceobj is None: sliceobj = slice(None) log.debug('%s: __getitem__ [%s]', self.name, slc)
if dtype is None: dtype = self.dtype
# Make the slice object compatible
data = self[sliceobj] # with the actual image shape - e.g.
data = np.array(data, dtype=dtype, copy=False) # an underlying 2D image is presented
# as having 3 dimensions.
oldShape = np.array(data.shape, dtype=np.float) shape = self.shape
newShape = np.array(newShape, dtype=np.float) realShape = self.realShape
slc = canonicalSliceObj( slc, shape)
if len(oldShape) != len(newShape): fancy = isValidFancySliceObj(slc, shape)
raise ValueError('Shapes don\'t match') expNdims, expShape = expectedShape( slc, shape)
slc = canonicalSliceObj( slc, realShape)
if not np.all(np.isclose(oldShape, newShape)):
# The nibabel arrayproxy does not support
ratio = oldShape / newShape # boolean mask-based (a.k.a. "fancy")
newShape = np.array(np.round(newShape), dtype=np.int) # indexing. If we are given a mask, we
scale = transform.scaleOffsetXform(ratio, 0) # force-load the image data into memory.
if all((fancy,
# If interpolating and smoothing, we apply a self.__dataMgr is None,
# gaussian filter along axes with a resampling self.__data is None)):
# ratio greater than 1.1. We do this so that log.debug('Fancy slice detected - force-loading image '
# interpolation has an effect when down-sampling 'data into memory (%s)', self.name)
# to a resolution where the voxel centres are self.data
# aligned (as otherwise any interpolation regime
# will be equivalent to nearest neighbour). This if self.__dataMgr is not None: data = self.__dataMgr[slc]
# more-or-less mimics the behaviour of FLIRT. elif self.__data is not None: data = self.__data[slc]
if order > 0 and smooth: else: data = self.__nibImage.dataobj[slc]
sigma = np.array(ratio)
sigma[ratio < 1.1] = 0 # Make sure that the result has the
sigma[ratio >= 1.1] *= 0.425 # shape that the caller is expecting.
data = ndimage.gaussian_filter(data, sigma) if fancy: data = data.reshape((data.size, ))
else: data = data.reshape(expShape)
data = ndimage.affine_transform(data,
scale[:3, :3], # If expNdims == 0, we should
output_shape=newShape, # return a scalar. If expNdims
order=order) # == 0, but data.size != 1,
# something is wrong somewhere
# Construct an affine transform which # (and is not being handled
# puts the resampled image into the # here).
# same world coordinate system as this if expNdims == 0 and data.size == 1:
# image.
xform = transform.concat(self.voxToWorldMat, scale) # Funny behaviour with numpy scalar arrays.
else: # data[()] returns a numpy scalar (which is
xform = self.voxToWorldMat # what we want). But data.item() returns a
# python scalar. And if the data is a
return data, xform # ndarray with 0 dims, data[0] will raise
# an error!
data = data[()]
def __getitem__(self, sliceobj):
"""Access the image data with the specified ``sliceobj``. return data
:arg sliceobj: Something which can slice the image data.
def __setitem__(self, slc, values):
"""Set the image data at ``slc`` to ``values``.
:arg slc: Something which can slice the image data.
:arg values: New image data.
.. note:: Modifying image data may force the entire image to be
loaded into memory if it has not already been loaded.
""" """
log.debug('%s: __getitem__ [%s]', self.name, sliceobj) if not self.editable:
raise RuntimeError('Image is not editable')
return self.__imageWrapper.__getitem__(sliceobj) values = np.array(values)
if values.size == 0:
return
def __setitem__(self, sliceobj, values): log.debug('%s: __setitem__ [%s = %s]', self.name, slc, values.shape)
"""Set the image data at ``sliceobj`` to ``values``.
:arg sliceobj: Something which can slice the image data. realShape = self.realShape
:arg values: New image data. origslc = slc
slc = canonicalSliceObj(origslc, realShape)
.. note:: Modifying image data will force the entire image to be # If the image shape does not match its
loaded into memory if it has not already been loaded. # 'display' shape (either less three
""" # dims, or has trailing dims of length
values = np.array(values) # 1), we might need to re-shape the
# values to prevent numpy from raising
# an error in the assignment below.
if realShape != self.shape:
log.debug('%s: __setitem__ [%s = %s]', expNdims, expShape = expectedShape(slc, realShape)
self.name, sliceobj, values.shape)
with self.__imageWrapper.skip(self.__lName): # If we are slicing a scalar, the
# assigned value has to be scalar.
if expNdims == 0 and isinstance(values, abc.Sequence):
oldRange = self.__imageWrapper.dataRange if len(values) > 1:
self.__imageWrapper.__setitem__(sliceobj, values) raise IndexError('Invalid assignment: [{}] = {}'.format(
newRange = self.__imageWrapper.dataRange slc, len(values)))
if values.size > 0: values = np.array(values).flatten()[0]
self.notify(topic='data', value=sliceobj) # Make sure that the values
# have a compatible shape.
else:
values = np.array(values)
if values.shape != expShape:
values = values.reshape(expShape)
if self.__saveState: # Use DataManager to manage data
self.__saveState = False # access if one has been specified
self.notify(topic='saveState') if self.__dataMgr is not None:
self.__dataMgr[slc] = values
if not np.all(np.isclose(oldRange, newRange)): # Use an internal numpy array
self.notify(topic='dataRange') # to persist data changes
else:
# force-load data - see the data() method
# Reset data range whenever the data is
# modified - see dataRange() method
if self.__data is None:
self.data
self.__data[slc] = values
self.__dataRange = None
# Notify that data has changed/image is not saved
self.notify(topic='data', value=origslc)
if self.__saveState:
self.__saveState = False
self.notify(topic='saveState')
def canonicalShape(shape): def canonicalShape(shape):
...@@ -1322,6 +1823,133 @@ def canonicalShape(shape): ...@@ -1322,6 +1823,133 @@ def canonicalShape(shape):
return shape return shape
def isValidFancySliceObj(sliceobj, shape):
"""Returns ``True`` if the given ``sliceobj`` is a valid and fancy slice
object.
``nibabel`` refers to slice objects as "fancy" if they comprise anything
but tuples of integers and simple ``slice`` objects. The ``Image`` class
supports an additional type of "fancy" slicing, where the ``sliceobj`` is
a boolean ``numpy`` array of the same shape as the image.
This function returns ``True`` if the given ``sliceobj`` adheres to these
requirements, ``False`` otherwise.
"""
# We only support boolean numpy arrays
# which have the same shape as the image
return (isinstance(sliceobj, np.ndarray) and
sliceobj.dtype == bool and
np.prod(sliceobj.shape) == np.prod(shape))
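For example, indexing an :class:`Image` with a boolean mask of the right shape - the kind of access which forces the image data into memory::

    import numpy as np
    import fsl.data.image as fslimage

    img  = fslimage.Image(np.random.random((4, 4, 4)))
    mask = img.data > 0.5  # boolean array with the same shape as the image
    vals = img[mask]       # 1D array containing the masked values
    print(vals.ndim, vals.size)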
def canonicalSliceObj(sliceobj, shape):
"""Returns a canonical version of the given ``sliceobj``. See the
``nibabel.fileslice.canonical_slicers`` function.
"""
# Fancy slice objects must have
# the same shape as the data
if isValidFancySliceObj(sliceobj, shape):
return sliceobj.reshape(shape)
else:
if not isinstance(sliceobj, tuple):
sliceobj = (sliceobj,)
if len(sliceobj) > len(shape):
sliceobj = sliceobj[:len(shape)]
return nib.fileslice.canonical_slicers(sliceobj, shape)
def expectedShape(sliceobj, shape):
"""Given a slice object, and the shape of an array to which
that slice object is going to be applied, returns the expected
shape of the result.
.. note:: It is assumed that the ``sliceobj`` has been passed through
the :func:`canonicalSliceObj` function.
:arg sliceobj: Something which can be used to slice an array
of shape ``shape``.
:arg shape: Shape of the array being sliced.
:returns: A tuple containing:
- Expected number of dimensions of the result
- Expected shape of the result (or ``None`` if
``sliceobj`` is fancy).
"""
if isValidFancySliceObj(sliceobj, shape):
return 1, None
# Truncate some dimensions from the
# slice object if it has too many
# (e.g. trailing dims of length 1).
elif len(sliceobj) > len(shape):
sliceobj = sliceobj[:len(shape)]
# Figure out the number of dimensions
# that the result should have, given
# this slice object.
expShape = []
for i in range(len(sliceobj)):
# Each dimension which has an
# int slice will be collapsed
if isinstance(sliceobj[i], int):
continue
start = sliceobj[i].start
stop = sliceobj[i].stop
if start is None: start = 0
if stop is None: stop = shape[i]
stop = min(stop, shape[i])
expShape.append(stop - start)
return len(expShape), expShape
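A worked example of :func:`canonicalSliceObj` and :func:`expectedShape` together::

    import fsl.data.image as fslimage

    shape = (10, 10, 10)

    # canonicalSliceObj pads the slice out to the full dimensionality
    slc = fslimage.canonicalSliceObj((slice(2, 5), 7), shape)

    # dimension 0 keeps three elements, dimension 1 is collapsed by
    # the integer index, and dimension 2 is left untouched
    print(fslimage.expectedShape(slc, shape))  # (2, [3, 10])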
def loadMetadata(image):
"""Searches for and loads any sidecar JSON files associated with the given
:class:`.Image`.
If the image looks to be part of a BIDS data set,
:func:`.bids.loadMetadata` is used. Otherwise, if a JSON file with the same
file prefix is present alongside the image, it is directly loaded.
:arg image: :class:`.Image` instance
:returns: Dict containing any metadata that was loaded.
"""
if image.dataSource is None:
return {}
filename = image.dataSource
basename = op.basename(removeExt(filename))
dirname = op.dirname(filename)
if fslbids.isBIDSFile(image.dataSource) and \
fslbids.inBIDSDir( image.dataSource):
return fslbids.loadMetadata(image.dataSource)
jsonfile = op.join(dirname, '{}.json'.format(basename))
if op.exists(jsonfile):
with open(jsonfile, 'rt') as f:
return json.load(f, strict=False)
return {}
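For example, given a hypothetical ``func.nii.gz``/``func.json`` pair in a directory that is not part of a BIDS data set::

    import json
    import numpy as np
    import fsl.data.image as fslimage

    fslimage.Image(np.zeros((4, 4, 4))).save('func.nii.gz')
    with open('func.json', 'wt') as f:
        json.dump({'RepetitionTime': 0.8}, f)

    meta = fslimage.loadMetadata(fslimage.Image('func.nii.gz'))
    print(meta['RepetitionTime'])  # 0.8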
def looksLikeImage(filename, allowedExts=None): def looksLikeImage(filename, allowedExts=None):
"""Returns ``True`` if the given file looks like a NIFTI image, ``False`` """Returns ``True`` if the given file looks like a NIFTI image, ``False``
otherwise. otherwise.
...@@ -1346,12 +1974,21 @@ def addExt(prefix, mustExist=True, unambiguous=True): ...@@ -1346,12 +1974,21 @@ def addExt(prefix, mustExist=True, unambiguous=True):
"""Adds a file extension to the given file ``prefix``. See """Adds a file extension to the given file ``prefix``. See
:func:`~fsl.utils.path.addExt`. :func:`~fsl.utils.path.addExt`.
""" """
return fslpath.addExt(prefix, try:
allowedExts=ALLOWED_EXTENSIONS, return fslpath.addExt(prefix,
mustExist=mustExist, allowedExts=ALLOWED_EXTENSIONS,
defaultExt=defaultExt(), mustExist=mustExist,
fileGroups=FILE_GROUPS, defaultExt=defaultExt(),
unambiguous=unambiguous) fileGroups=FILE_GROUPS,
unambiguous=unambiguous)
except fslpath.PathError as e:
# hacky: if more than one file with
# the prefix exists, we emit a
# warning, because in most cases
# this is a bad thing.
if str(e).startswith('More than'):
log.warning(e)
raise e
def splitExt(filename): def splitExt(filename):
...@@ -1375,26 +2012,105 @@ def removeExt(filename): ...@@ -1375,26 +2012,105 @@ def removeExt(filename):
return fslpath.removeExt(filename, ALLOWED_EXTENSIONS) return fslpath.removeExt(filename, ALLOWED_EXTENSIONS)
def defaultExt(): def fixExt(filename, **kwargs):
"""Fix the extension of ``filename``.
For example, if a file name is passed in as ``file.nii.gz``, but the
file is actually ``file.nii``, this function will fix the file name.
If ``filename`` already exists, it is returned unchanged.
All other arguments are passed through to :func:`addExt`.
"""
if op.exists(filename):
return filename
else:
return addExt(removeExt(filename), **kwargs)
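For example, if ``file.nii`` exists on disk, but a caller has guessed the wrong suffix (file names here are hypothetical)::

    import numpy as np
    import fsl.data.image as fslimage

    fslimage.Image(np.zeros((3, 3, 3))).save('file.nii')

    # 'file.nii.gz' does not exist, so the extension is corrected
    print(fslimage.fixExt('file.nii.gz'))  # file.nii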
class FileType(enum.Enum):
"""Enumeration of supported image file types. The values for each type
are the same as defined in the FSL ``newimage/newimage.h`` header file.
"""
NIFTI = 1
NIFTI2 = 2
ANALYZE = 10
NIFTI_PAIR = 11
NIFTI2_PAIR = 12
ANALYZE_GZ = 100
NIFTI_GZ = 101
NIFTI2_GZ = 102
NIFTI_PAIR_GZ = 111
NIFTI2_PAIR_GZ = 112
def fileType(filename) -> FileType:
"""Infer an image file type. """
filename = addExt( filename)
extension = getExt( filename)
img = nib.load(filename)
# Mappings between (image type, file extension), and type code.
# Order is important due to the nibabel class hierarchy - we
# must test in order NIFTI2, NIFTI1, ANALYZE
anlz = (nib.AnalyzeImage, nib.AnalyzeHeader)
nii1 = (nib.Nifti1Image, nib.Nifti1Pair, nib.Nifti1Header)
nii2 = (nib.Nifti2Image, nib.Nifti2Pair, nib.Nifti2Header)
mappings = [
(nii2, '.nii', FileType.NIFTI2),
(nii2, '.nii.gz', FileType.NIFTI2_GZ),
(nii2, '.hdr', FileType.NIFTI2_PAIR),
(nii2, '.img', FileType.NIFTI2_PAIR),
(nii2, '.hdr.gz', FileType.NIFTI2_PAIR_GZ),
(nii2, '.img.gz', FileType.NIFTI2_PAIR_GZ),
(nii1, '.nii', FileType.NIFTI),
(nii1, '.nii.gz', FileType.NIFTI_GZ),
(nii1, '.hdr', FileType.NIFTI_PAIR),
(nii1, '.img', FileType.NIFTI_PAIR),
(nii1, '.hdr.gz', FileType.NIFTI_PAIR_GZ),
(nii1, '.img.gz', FileType.NIFTI_PAIR_GZ),
(anlz, '.hdr', FileType.ANALYZE),
(anlz, '.img', FileType.ANALYZE),
(anlz, '.hdr.gz', FileType.ANALYZE_GZ),
(anlz, '.img.gz', FileType.ANALYZE_GZ),
]
for ftype, ext, code in mappings:
if isinstance(img, ftype) and extension == ext:
return code
raise ValueError(f'Could not infer image type: {filename}')
def defaultOutputType() -> FileType:
"""Return the default output file type. This is based on the
``$FSLOUTPUTTYPE`` environment variable.
"""
fsloutputtype = os.environ.get('FSLOUTPUTTYPE', None)
if fsloutputtype not in FileType.__members__:
fsloutputtype = 'NIFTI_GZ'
return FileType[fsloutputtype]
def defaultExt() -> str:
"""Returns the default NIFTI file extension that should be used. """Returns the default NIFTI file extension that should be used.
If the ``$FSLOUTPUTTYPE`` variable is set, its value is used. If the ``$FSLOUTPUTTYPE`` variable is set, its value is used.
Otherwise, ``.nii.gz`` is returned. Otherwise, ``.nii.gz`` is returned.
""" """
# TODO: Add analyze support.
options = { options = {
'NIFTI' : '.nii', FileType.ANALYZE : '.img',
'NIFTI_PAIR' : '.img', FileType.NIFTI : '.nii',
'NIFTI_GZ' : '.nii.gz', FileType.NIFTI2 : '.nii',
FileType.NIFTI_GZ : '.nii.gz',
FileType.NIFTI2_GZ : '.nii.gz',
FileType.NIFTI_PAIR : '.img',
FileType.NIFTI2_PAIR : '.img',
FileType.ANALYZE_GZ : '.img.gz',
FileType.NIFTI_PAIR_GZ : '.img.gz',
FileType.NIFTI2_PAIR_GZ : '.img.gz',
} }
outputType = os.environ.get('FSLOUTPUTTYPE', 'NIFTI_GZ') return options[defaultOutputType()]
return options.get(outputType, '.nii.gz')
@deprecated.deprecated('2.0.0', '3.0.0', 'Use nibabel.load instead.')
def loadIndexedImageFile(filename):
"""Deprecated - this call is equivalent to calling ``nibabel.load``. """
return nib.load(filename)
...@@ -7,6 +7,9 @@ ...@@ -7,6 +7,9 @@
"""This module provides the :class:`ImageWrapper` class, which can be used """This module provides the :class:`ImageWrapper` class, which can be used
to manage data access to ``nibabel`` NIFTI images. to manage data access to ``nibabel`` NIFTI images.
.. note:: This module is deprecated - it is being moved to the FSLeyes project,
and will be removed in a future version of fslpy.
Terminology Terminology
----------- -----------
...@@ -37,13 +40,15 @@ get their definitions straight: ...@@ -37,13 +40,15 @@ get their definitions straight:
""" """
import logging import logging
import collections import collections
import itertools as it import collections.abc as abc
import itertools as it
import numpy as np import numpy as np
import nibabel as nib
import fsl.data.image as fslimage
import fsl.utils.deprecated as deprecated
import fsl.utils.notifier as notifier import fsl.utils.notifier as notifier
import fsl.utils.naninfrange as nir import fsl.utils.naninfrange as nir
import fsl.utils.idle as idle import fsl.utils.idle as idle
...@@ -75,8 +80,7 @@ class ImageWrapper(notifier.Notifier): ...@@ -75,8 +80,7 @@ class ImageWrapper(notifier.Notifier):
- The image data is not modified (via :meth:`__setitem__`). - The image data is not modified (via :meth:`__setitem__`).
If any of these conditions do not hold, the image data will be loaded into If any of these conditions do not hold, the image data will be loaded into
memory and accessed directly, via the ``nibabel.Nifti1Image.get_data`` memory and accessed directly.
method.
*Image dimensionality* *Image dimensionality*
...@@ -148,6 +152,8 @@ class ImageWrapper(notifier.Notifier): ...@@ -148,6 +152,8 @@ class ImageWrapper(notifier.Notifier):
""" """
@deprecated.deprecated('3.9.0', '4.0.0',
'The ImageWrapper has been migrated to FSLeyes')
def __init__(self, def __init__(self,
image, image,
name=None, name=None,
...@@ -175,8 +181,6 @@ class ImageWrapper(notifier.Notifier): ...@@ -175,8 +181,6 @@ class ImageWrapper(notifier.Notifier):
data range is updated directly on reads/writes. data range is updated directly on reads/writes.
""" """
import fsl.data.image as fslimage
self.__image = image self.__image = image
self.__name = name self.__name = name
self.__taskThread = None self.__taskThread = None
...@@ -189,10 +193,10 @@ class ImageWrapper(notifier.Notifier): ...@@ -189,10 +193,10 @@ class ImageWrapper(notifier.Notifier):
if d == 1: self.__numRealDims -= 1 if d == 1: self.__numRealDims -= 1
else: break else: break
# Degenerate case - if every # Degenerate case - less
# dimension has length 1 # than three real dimensions
if self.__numRealDims == 0: if self.__numRealDims < 3:
self.__numRealDims = len(image.shape) self.__numRealDims = min(3, len(image.shape))
# And save the number of # And save the number of
# 'padding' dimensions too. # 'padding' dimensions too.
...@@ -217,7 +221,11 @@ class ImageWrapper(notifier.Notifier): ...@@ -217,7 +221,11 @@ class ImageWrapper(notifier.Notifier):
self.reset(dataRange) self.reset(dataRange)
if loadData: # We keep an internal ref to
# the data numpy array if/when
# it is loaded in memory
self.__data = None
if loadData or image.in_memory:
self.loadData() self.loadData()
if threaded: if threaded:
...@@ -231,6 +239,7 @@ class ImageWrapper(notifier.Notifier): ...@@ -231,6 +239,7 @@ class ImageWrapper(notifier.Notifier):
the :class:`.TaskThread` is stopped. the :class:`.TaskThread` is stopped.
""" """
self.__image = None self.__image = None
self.__data = None
if self.__taskThread is not None: if self.__taskThread is not None:
self.__taskThread.stop() self.__taskThread.stop()
self.__taskThread = None self.__taskThread = None
...@@ -300,7 +309,16 @@ class ImageWrapper(notifier.Notifier): ...@@ -300,7 +309,16 @@ class ImageWrapper(notifier.Notifier):
# Internally, we calculate and store the # Internally, we calculate and store the
# data range for each volume/slice/vector # data range for each volume/slice/vector
self.__volRanges = np.zeros((nvols, 2), dtype=np.float32) #
# We use nan as a placeholder, so the
# dtype must be non-integral. The
# len(dtype) check takes into account
# structured data (e.g. RGB)
dtype = self.__image.get_data_dtype()
if np.issubdtype(dtype, np.integer) or len(dtype) > 0:
dtype = np.float32
self.__volRanges = np.zeros((nvols, 2),
dtype=dtype)
self.__coverage[ :] = np.nan self.__coverage[ :] = np.nan
self.__volRanges[:] = np.nan self.__volRanges[:] = np.nan
...@@ -367,16 +385,19 @@ class ImageWrapper(notifier.Notifier): ...@@ -367,16 +385,19 @@ class ImageWrapper(notifier.Notifier):
"""Forces all of the image data to be loaded into memory. """Forces all of the image data to be loaded into memory.
.. note:: This method will be called by :meth:`__init__` if its .. note:: This method will be called by :meth:`__init__` if its
``loadData`` parameter is ``True``. ``loadData`` parameter is ``True``. It will also be called
on all write operations (see :meth:`__setitem__`).
""" """
if self.__data is None:
self.__data = np.asanyarray(self.__image.dataobj)
# If the data is not already loaded, this will
# cause nibabel to load it. By default, nibabel @property
# will cache the numpy array that contains the def dataIsLoaded(self):
# image data, so subsequent calls to this """Return ``True`` if the image data has been loaded into memory, ``False``
# method will not overwrite any changes that otherwise.
# have been made to the data array. """
self.__image.get_data() return self.__data is not None
def __getData(self, sliceobj, isTuple=False): def __getData(self, sliceobj, isTuple=False):
...@@ -397,14 +418,7 @@ class ImageWrapper(notifier.Notifier): ...@@ -397,14 +418,7 @@ class ImageWrapper(notifier.Notifier):
# ArrayProxy. Otherwise if it is in # ArrayProxy. Otherwise if it is in
# memory, we can access it directly. # memory, we can access it directly.
# #
# Furthermore, if it is in memory and # Note also that if the caller has
# has been modified, the ArrayProxy
# will give us out-of-date values (as
# the ArrayProxy reads from disk). So
# we have to read from the in-memory
# array to get changed values.
#
# Finally, note that if the caller has
# given us a 'fancy' slice object (a # given us a 'fancy' slice object (a
# boolean numpy array), but the image # boolean numpy array), but the image
# data is not in memory, we can't access # data is not in memory, we can't access
...@@ -412,8 +426,8 @@ class ImageWrapper(notifier.Notifier): ...@@ -412,8 +426,8 @@ class ImageWrapper(notifier.Notifier):
# (the dataobj attribute) cannot handle # (the dataobj attribute) cannot handle
# fancy indexing. In this case an error # fancy indexing. In this case an error
# will be raised. # will be raised.
if self.__image.in_memory: return self.__image.get_data()[sliceobj] if self.__data is not None: return self.__data[ sliceobj]
else: return self.__image.dataobj[ sliceobj] else: return self.__image.dataobj[sliceobj]
def __imageIsCovered(self): def __imageIsCovered(self):
...@@ -434,18 +448,18 @@ class ImageWrapper(notifier.Notifier): ...@@ -434,18 +448,18 @@ class ImageWrapper(notifier.Notifier):
_, expansions = calcExpansion(slices, self.__coverage) _, expansions = calcExpansion(slices, self.__coverage)
expansions = collapseExpansions(expansions, self.__numRealDims - 1) expansions = collapseExpansions(expansions, self.__numRealDims - 1)
log.debug('Updating image {} data range [slice: {}] ' log.debug('Updating image %s data range [slice: %s] '
'(current range: [{}, {}]; ' '(current range: [%s, %s]; '
'number of expansions: {}; ' 'number of expansions: %s; '
'current coverage: {}; ' 'current coverage: %s; '
'volume ranges: {})'.format( 'volume ranges: %s)',
self.__name, self.__name,
slices, slices,
self.__range[0], self.__range[0],
self.__range[1], self.__range[1],
len(expansions), len(expansions),
self.__coverage, self.__coverage,
self.__volRanges)) self.__volRanges)
# As we access the data for each expansions, # As we access the data for each expansions,
# we want it to have the same dimensionality # we want it to have the same dimensionality
...@@ -497,12 +511,12 @@ class ImageWrapper(notifier.Notifier): ...@@ -497,12 +511,12 @@ class ImageWrapper(notifier.Notifier):
if any((oldmin is None, oldmax is None)) or \ if any((oldmin is None, oldmax is None)) or \
not np.all(np.isclose([oldmin, oldmax], [newmin, newmax])): not np.all(np.isclose([oldmin, oldmax], [newmin, newmax])):
log.debug('Image {} range changed: [{}, {}] -> [{}, {}]'.format( log.debug('Image %s range changed: [%s, %s] -> [%s, %s]',
self.__name, self.__name,
oldmin, oldmin,
oldmax, oldmax,
newmin, newmin,
newmax)) newmax)
self.notify() self.notify()
...@@ -581,15 +595,15 @@ class ImageWrapper(notifier.Notifier): ...@@ -581,15 +595,15 @@ class ImageWrapper(notifier.Notifier):
slices = np.array(slices.T, dtype=np.uint32) slices = np.array(slices.T, dtype=np.uint32)
slices = tuple(it.chain(map(tuple, slices), [(lowVol, highVol)])) slices = tuple(it.chain(map(tuple, slices), [(lowVol, highVol)]))
log.debug('Image {} data written - clearing known data ' log.debug('Image %s data written - clearing known data '
'range on volumes {} - {} (write slice: {}; ' 'range on volumes %s - %s (write slice: %s; '
'coverage: {}; volRanges: {})'.format( 'coverage: %s; volRanges: %s)',
self.__name, self.__name,
lowVol, lowVol,
highVol, highVol,
slices, slices,
self.__coverage[:, :, lowVol:highVol], self.__coverage[:, :, lowVol:highVol],
self.__volRanges[lowVol:highVol, :])) self.__volRanges[lowVol:highVol, :])
for vol in range(lowVol, highVol): for vol in range(lowVol, highVol):
self.__coverage[:, :, vol] = np.nan self.__coverage[:, :, vol] = np.nan
...@@ -612,7 +626,7 @@ class ImageWrapper(notifier.Notifier): ...@@ -612,7 +626,7 @@ class ImageWrapper(notifier.Notifier):
:arg sliceobj: Something which can slice the image data. :arg sliceobj: Something which can slice the image data.
""" """
log.debug('Getting image data: {}'.format(sliceobj)) log.debug('Getting image data: %s', sliceobj)
shape = self.__canonicalShape shape = self.__canonicalShape
realShape = self.__image.shape realShape = self.__image.shape
...@@ -689,13 +703,13 @@ class ImageWrapper(notifier.Notifier): ...@@ -689,13 +703,13 @@ class ImageWrapper(notifier.Notifier):
# If we are slicing a scalar, the # If we are slicing a scalar, the
# assigned value has to be scalar. # assigned value has to be scalar.
if expNdims == 0 and isinstance(values, collections.Sequence): if expNdims == 0 and isinstance(values, abc.Sequence):
if len(values) > 1: if len(values) > 1:
raise IndexError('Invalid assignment: [{}] = {}'.format( raise IndexError('Invalid assignment: [{}] = {}'.format(
sliceobj, len(values))) sliceobj, len(values)))
values = values[0] values = np.array(values).flatten()[0]
# Make sure that the values # Make sure that the values
# have a compatible shape. # have a compatible shape.
...@@ -711,108 +725,33 @@ class ImageWrapper(notifier.Notifier): ...@@ -711,108 +725,33 @@ class ImageWrapper(notifier.Notifier):
# have any effect. # have any effect.
self.loadData() self.loadData()
self.__image.get_data()[sliceobj] = values self.__data[sliceobj] = values
self.__updateDataRangeOnWrite(slices, values) self.__updateDataRangeOnWrite(slices, values)
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to fsl.data.image')
def isValidFancySliceObj(sliceobj, shape): def isValidFancySliceObj(sliceobj, shape):
"""Returns ``True`` if the given ``sliceobj`` is a valid and fancy slice """Deprecated - moved to :mod:`fsl.data.image`."""
object. return fslimage.isValidFancySliceObj(sliceobj, shape)
``nibabel`` refers to slice objects as "fancy" if they comprise anything
but tuples of integers and simple ``slice`` objects. The ``ImageWrapper``
class supports one type of "fancy" slicing, where the ``sliceobj`` is a
boolean ``numpy`` array of the same shape as the image.
This function returns ``True`` if the given ``sliceobj`` adheres to these
requirements, ``False`` otherwise.
"""
# We only support boolean numpy arrays
# which have the same shape as the image
return (isinstance(sliceobj, np.ndarray) and
sliceobj.dtype == np.bool and
np.prod(sliceobj.shape) == np.prod(shape))
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to fsl.data.image')
def canonicalSliceObj(sliceobj, shape): def canonicalSliceObj(sliceobj, shape):
"""Returns a canonical version of the given ``sliceobj``. See the """Deprecated - moved to :mod:`fsl.data.image`."""
``nibabel.fileslice.canonical_slicers`` function. return fslimage.canonicalSliceObj(sliceobj, shape)
"""
# Fancy slice objects must have
# the same shape as the data
if isValidFancySliceObj(sliceobj, shape):
return sliceobj.reshape(shape)
else:
if not isinstance(sliceobj, tuple):
sliceobj = (sliceobj,)
if len(sliceobj) > len(shape):
sliceobj = sliceobj[:len(shape)]
return nib.fileslice.canonical_slicers(sliceobj, shape)
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to fsl.data.image')
def expectedShape(sliceobj, shape): def expectedShape(sliceobj, shape):
"""Given a slice object, and the shape of an array to which """Deprecated - moved to :mod:`fsl.data.image`."""
that slice object is going to be applied, returns the expected return fslimage.expectedShape(sliceobj, shape)
shape of the result.
.. note:: It is assumed that the ``sliceobj`` has been passed through
the :func:`canonicalSliceObj` function.
:arg sliceobj: Something which can be used to slice an array
of shape ``shape``.
:arg shape: Shape of the array being sliced.
:returns: A tuple containing:
- Expected number of dimensions of the result
- Expected shape of the result (or ``None`` if
``sliceobj`` is fancy).
"""
if isValidFancySliceObj(sliceobj, shape):
return 1, None
# Truncate some dimensions from the
# slice object if it has too many
# (e.g. trailing dims of length 1).
elif len(sliceobj) > len(shape):
sliceobj = sliceobj[:len(shape)]
# Figure out the number of dimensions
# that the result should have, given
# this slice object.
expShape = []
for i in range(len(sliceobj)):
# Each dimension which has an
# int slice will be collapsed
if isinstance(sliceobj[i], int):
continue
start = sliceobj[i].start
stop = sliceobj[i].stop
if start is None: start = 0
if stop is None: stop = shape[i]
stop = min(stop, shape[i])
expShape.append(stop - start)
return len(expShape), expShape
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def sliceObjToSliceTuple(sliceobj, shape): def sliceObjToSliceTuple(sliceobj, shape):
"""Turns an array slice object into a tuple of (low, high) index """Deprecated - the imagewrapper has been moved to FSLeyes.
Turns an array slice object into a tuple of (low, high) index
pairs, one pair for each dimension in the given shape pairs, one pair for each dimension in the given shape
:arg sliceobj: Something which can be used to slice an array of shape :arg sliceobj: Something which can be used to slice an array of shape
...@@ -828,7 +767,7 @@ def sliceObjToSliceTuple(sliceobj, shape): ...@@ -828,7 +767,7 @@ def sliceObjToSliceTuple(sliceobj, shape):
# The sliceobj could be a single sliceobj # The sliceobj could be a single sliceobj
# or integer, instead of a tuple # or integer, instead of a tuple
if not isinstance(sliceobj, collections.Sequence): if not isinstance(sliceobj, abc.Sequence):
sliceobj = [sliceobj] sliceobj = [sliceobj]
# Turn e.g. array[6] into array[6, :, :] # Turn e.g. array[6] into array[6, :, :]
...@@ -851,8 +790,11 @@ def sliceObjToSliceTuple(sliceobj, shape): ...@@ -851,8 +790,11 @@ def sliceObjToSliceTuple(sliceobj, shape):
return tuple(indices) return tuple(indices)
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def sliceTupleToSliceObj(slices): def sliceTupleToSliceObj(slices):
"""Turns a sequence of (low, high) index pairs into a tuple of array """Deprecated - the imagewrapper has been moved to FSLeyes.
Turns a sequence of (low, high) index pairs into a tuple of array
``slice`` objects. ``slice`` objects.
:arg slices: A sequence of (low, high) index pairs. :arg slices: A sequence of (low, high) index pairs.
...@@ -866,8 +808,11 @@ def sliceTupleToSliceObj(slices): ...@@ -866,8 +808,11 @@ def sliceTupleToSliceObj(slices):
return tuple(sliceobj) return tuple(sliceobj)
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def adjustCoverage(oldCoverage, slices): def adjustCoverage(oldCoverage, slices):
"""Adjusts/expands the given ``oldCoverage`` so that it covers the """Deprecated - the imagewrapper has been moved to FSLeyes.
Adjusts/expands the given ``oldCoverage`` so that it covers the
given set of ``slices``. given set of ``slices``.
:arg oldCoverage: A ``numpy`` array of shape ``(2, n)`` containing :arg oldCoverage: A ``numpy`` array of shape ``(2, n)`` containing
...@@ -914,8 +859,11 @@ return code for the :func:`sliceOverlap` function. ...@@ -914,8 +859,11 @@ return code for the :func:`sliceOverlap` function.
""" """
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def sliceOverlap(slices, coverage): def sliceOverlap(slices, coverage):
"""Determines whether the given ``slices`` overlap with the given """Deprecated - the imagewrapper has been moved to FSLeyes.
Determines whether the given ``slices`` overlap with the given
``coverage``. ``coverage``.
:arg slices: A sequence of (low, high) index pairs, assumed to cover :arg slices: A sequence of (low, high) index pairs, assumed to cover
...@@ -981,8 +929,11 @@ def sliceOverlap(slices, coverage): ...@@ -981,8 +929,11 @@ def sliceOverlap(slices, coverage):
elif np.all(overlapStates == OVERLAP_ALL): return OVERLAP_ALL elif np.all(overlapStates == OVERLAP_ALL): return OVERLAP_ALL
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def sliceCovered(slices, coverage): def sliceCovered(slices, coverage):
"""Returns ``True`` if the portion of the image data calculated by """Deprecated - the imagewrapper has been moved to FSLeyes.
Returns ``True`` if the portion of the image data calculated by
the given ``slices`` has already been calculated, ``False`` otherwise. the given ``slices`` has already been calculated, ``False`` otherwise.
:arg slices: A sequence of (low, high) index pairs, assumed to cover :arg slices: A sequence of (low, high) index pairs, assumed to cover
...@@ -1013,8 +964,11 @@ def sliceCovered(slices, coverage): ...@@ -1013,8 +964,11 @@ def sliceCovered(slices, coverage):
return True return True
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def calcExpansion(slices, coverage): def calcExpansion(slices, coverage):
"""Calculates a series of *expansion* slices, which can be used to expand """Deprecated - the imagewrapper has been moved to FSLeyes.
Calculates a series of *expansion* slices, which can be used to expand
the given ``coverage`` so that it includes the given ``slices``. the given ``coverage`` so that it includes the given ``slices``.
:arg slices: Slices that the coverage needs to be expanded to cover. :arg slices: Slices that the coverage needs to be expanded to cover.
...@@ -1183,8 +1137,11 @@ def calcExpansion(slices, coverage): ...@@ -1183,8 +1137,11 @@ def calcExpansion(slices, coverage):
return volumes, expansions return volumes, expansions
@deprecated.deprecated('3.9.0', '4.0.0', 'Moved to FSLeyes')
def collapseExpansions(expansions, numDims): def collapseExpansions(expansions, numDims):
"""Scans through the given list of expansions (each assumed to pertain """Deprecated - the imagewrapper has been moved to FSLeyes.
Scans through the given list of expansions (each assumed to pertain
to a single 3D image), and combines any which cover the same to a single 3D image), and combines any which cover the same
image area, and cover adjacent volumes. image area, and cover adjacent volumes.
......
...@@ -33,7 +33,6 @@ import logging ...@@ -33,7 +33,6 @@ import logging
import os.path as op import os.path as op
import numpy as np import numpy as np
import fsl.utils.path as fslpath
import fsl.data.image as fslimage import fsl.data.image as fslimage
import fsl.data.featanalysis as featanalysis import fsl.data.featanalysis as featanalysis
...@@ -63,10 +62,9 @@ def isMelodicImage(path): ...@@ -63,10 +62,9 @@ def isMelodicImage(path):
def isMelodicDir(path): def isMelodicDir(path):
"""Returns ``True`` if the given path looks like it is contained within """Returns ``True`` if the given path looks like it is a MELODIC directory,
a MELODIC directory, ``False`` otherwise. A melodic directory: ``False`` otherwise. A MELODIC directory:
- Must be named ``*.ica``.
- Must contain a file called ``melodic_IC.nii.gz`` or - Must contain a file called ``melodic_IC.nii.gz`` or
``melodic_oIC.nii.gz``. ``melodic_oIC.nii.gz``.
- Must contain a file called ``melodic_mix``. - Must contain a file called ``melodic_mix``.
...@@ -75,12 +73,7 @@ def isMelodicDir(path): ...@@ -75,12 +73,7 @@ def isMelodicDir(path):
path = op.abspath(path) path = op.abspath(path)
if op.isdir(path): dirname = path if not op.isdir(path):
else: dirname = op.dirname(path)
sufs = ['.ica']
if not any([dirname.endswith(suf) for suf in sufs]):
return False return False
# Must contain an image file called # Must contain an image file called
...@@ -88,7 +81,7 @@ def isMelodicDir(path): ...@@ -88,7 +81,7 @@ def isMelodicDir(path):
prefixes = ['melodic_IC', 'melodic_oIC'] prefixes = ['melodic_IC', 'melodic_oIC']
for p in prefixes: for p in prefixes:
try: try:
fslimage.addExt(op.join(dirname, p)) fslimage.addExt(op.join(path, p))
break break
except fslimage.PathError: except fslimage.PathError:
pass pass
...@@ -97,8 +90,8 @@ def isMelodicDir(path): ...@@ -97,8 +90,8 @@ def isMelodicDir(path):
# Must contain files called # Must contain files called
# melodic_mix and melodic_FTmix # melodic_mix and melodic_FTmix
if not op.exists(op.join(dirname, 'melodic_mix')): return False if not op.exists(op.join(path, 'melodic_mix')): return False
if not op.exists(op.join(dirname, 'melodic_FTmix')): return False if not op.exists(op.join(path, 'melodic_FTmix')): return False
return True return True
...@@ -108,10 +101,13 @@ def getAnalysisDir(path): ...@@ -108,10 +101,13 @@ def getAnalysisDir(path):
to that MELODIC directory is returned. Otherwise, ``None`` is returned. to that MELODIC directory is returned. Otherwise, ``None`` is returned.
""" """
meldir = fslpath.deepest(path, ['.ica']) if not op.isdir(path):
path = op.dirname(path)
if meldir is not None and isMelodicDir(meldir): while path not in (op.sep, ''):
return meldir if isMelodicDir(path):
return path
path = op.dirname(path)
return None return None
...@@ -137,10 +133,18 @@ def getDataFile(meldir): ...@@ -137,10 +133,18 @@ def getDataFile(meldir):
if topDir is None: if topDir is None:
return None return None
dataFile = op.join(topDir, 'filtered_func_data') # People often rename filtered_func_data.nii.gz
# to something like filtered_func_data_clean.nii.gz,
# because that is the recommended approach when
# performing ICA-based denoising. So we try both.
candidates = ['filtered_func_data', 'filtered_func_data_clean']
try: return fslimage.addExt(dataFile) for candidate in candidates:
except fslimage.PathError: return None dataFile = op.join(topDir, candidate)
try: return fslimage.addExt(dataFile)
except fslimage.PathError: continue
return None
def getMeanFile(meldir): def getMeanFile(meldir):
...@@ -187,7 +191,7 @@ def getNumComponents(meldir): ...@@ -187,7 +191,7 @@ def getNumComponents(meldir):
contained in the given directory. contained in the given directory.
""" """
icImg = fslimage.Image(getICFile(meldir), loadData=False, calcRange=False) icImg = fslimage.Image(getICFile(meldir))
return icImg.shape[3] return icImg.shape[3]
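Taken together, the new ``getAnalysisDir`` walks up the directory tree until ``isMelodicDir`` passes, and ``getDataFile`` then probes both data file names. A usage sketch, with hypothetical paths (the files must actually exist on disk for these calls to succeed)::

    import fsl.data.melodicanalysis as melanalysis

    path = '/data/study/subj1.ica/stats/thresh_zstat1.nii.gz'

    # Walks up from `path`, testing isMelodicDir at each level
    meldir = melanalysis.getAnalysisDir(path)
    # -> '/data/study/subj1.ica' (or None if no MELODIC dir is found)

    if meldir is not None:
        # Tries filtered_func_data, then filtered_func_data_clean
        datafile = melanalysis.getDataFile(meldir)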
......
...@@ -74,9 +74,7 @@ class MelodicImage(fslimage.Image): ...@@ -74,9 +74,7 @@ class MelodicImage(fslimage.Image):
dataFile = self.getDataFile() dataFile = self.getDataFile()
if dataFile is not None: if dataFile is not None:
dataImage = fslimage.Image(dataFile, dataImage = fslimage.Image(dataFile)
loadData=False,
calcRange=False)
if dataImage.ndim >= 4: if dataImage.ndim >= 4:
self.__tr = dataImage.pixdim[3] self.__tr = dataImage.pixdim[3]
......
...@@ -33,14 +33,21 @@ import collections ...@@ -33,14 +33,21 @@ import collections
import os.path as op import os.path as op
import numpy as np import numpy as np
import fsl.utils.meta as meta import fsl.utils.meta as meta
import fsl.utils.notifier as notifier import fsl.utils.notifier as notifier
import fsl.utils.transform as transform import fsl.transform.affine as affine
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class IncompatibleVerticesError(ValueError):
"""``ValueError`` raised by the :meth:`Mesh.addVertices` method if
an attempt is made to add a vertex set with the wrong number of
vertices.
"""
class Mesh(notifier.Notifier, meta.Meta): class Mesh(notifier.Notifier, meta.Meta):
"""The ``Mesh`` class represents a 3D model. A mesh is defined by a """The ``Mesh`` class represents a 3D model. A mesh is defined by a
collection of ``N`` vertices, and ``M`` triangles. The triangles are collection of ``N`` vertices, and ``M`` triangles. The triangles are
...@@ -98,6 +105,12 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -98,6 +105,12 @@ class Mesh(notifier.Notifier, meta.Meta):
selectedVertices selectedVertices
vertexSets vertexSets
.. note:: Internally the ``Mesh`` class may store two versions of the
triangles, with opposite winding orders. It keeps track of the
required triangle winding order for each vertex set, so that
the :meth:`indices` method will return the appropriate copy for
the currently selected vertex set.
**Vertex data** **Vertex data**
...@@ -148,16 +161,8 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -148,16 +161,8 @@ class Mesh(notifier.Notifier, meta.Meta):
""" """
def __new__(cls, *args, **kwargs):
"""Create a ``Mesh``. We must override ``__new__``, otherwise the
:class:`Meta` and :class:`Notifier` ``__new__`` methods will not be
called correctly.
"""
return super(Mesh, cls).__new__(cls, *args, **kwargs)
def __init__(self, def __init__(self,
indices, indices=None,
name='mesh', name='mesh',
dataSource=None, dataSource=None,
vertices=None, vertices=None,
...@@ -168,7 +173,8 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -168,7 +173,8 @@ class Mesh(notifier.Notifier, meta.Meta):
:meth:`addVertices` method. :meth:`addVertices` method.
:arg indices: A list of indices into the vertex data, defining the :arg indices: A list of indices into the vertex data, defining the
mesh triangles. mesh triangles. If not provided, must be assigned
after creation via the :meth:`indices` setter method.
:arg name: A name for this ``Mesh``. :arg name: A name for this ``Mesh``.
...@@ -181,24 +187,31 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -181,24 +187,31 @@ class Mesh(notifier.Notifier, meta.Meta):
:meth:`addVertices` method along with ``vertices``. :meth:`addVertices` method along with ``vertices``.
""" """
if indices is None and vertices is not None:
raise ValueError('Indices must be provided '
'if vertices are provided')
self.__name = name self.__name = name
self.__dataSource = dataSource self.__dataSource = dataSource
self.__indices = np.asarray(indices).reshape((-1, 3))
self.__nvertices = self.__indices.max() + 1
# This attribute is used to store
# the currently selected vertex set,
# used as a key into all of the
# dictionaries below.
self.__selected = None
# Flag used to keep track of whether # nvertices/indices are assigned in the
# the triangle winding order has been # indices setter method.
# "fixed" - see the addVertices method.
self.__fixed = False # We potentially store two copies of
# the indices - one set (__indices)
# as provided, and the other
# (__fixedIndices) with opposite
# winding orders. The vindices dict
# stores refs to one or the other for
# each vertex set.
self.__nvertices = None
self.__indices = None
self.__fixedIndices = None
self.__vindices = collections.OrderedDict()
# All of these are populated # All of these are populated
# in the addVertices method # in the addVertices method
self.__selected = None
self.__vertices = collections.OrderedDict() self.__vertices = collections.OrderedDict()
self.__loBounds = collections.OrderedDict() self.__loBounds = collections.OrderedDict()
self.__hiBounds = collections.OrderedDict() self.__hiBounds = collections.OrderedDict()
...@@ -216,8 +229,9 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -216,8 +229,9 @@ class Mesh(notifier.Notifier, meta.Meta):
# in the trimesh method # in the trimesh method
self.__trimesh = collections.OrderedDict() self.__trimesh = collections.OrderedDict()
# Add initial vertex # Add initial indices/vertices if provided
# set if provided if indices is not None:
self.indices = indices
if vertices is not None: if vertices is not None:
self.addVertices(vertices, fixWinding=fixWinding) self.addVertices(vertices, fixWinding=fixWinding)
...@@ -241,6 +255,12 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -241,6 +255,12 @@ class Mesh(notifier.Notifier, meta.Meta):
return self.__name return self.__name
@name.setter
def name(self, name):
"""Set the name of this ``Mesh``. """
self.__name = name
@property @property
def dataSource(self): def dataSource(self):
"""Returns the data source of this ``Mesh``. """ """Returns the data source of this ``Mesh``. """
...@@ -280,8 +300,23 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -280,8 +300,23 @@ class Mesh(notifier.Notifier, meta.Meta):
@property @property
def indices(self): def indices(self):
"""The ``(M, 3)`` triangles of this mesh. """ """The ``(M, 3)`` triangles of this mesh. Returns ``None`` if
return self.__indices indices have not yet been assigned.
"""
if self.__indices is None:
return None
return self.__vindices[self.__selected]
@indices.setter
def indices(self, indices):
"""Set the indices for this mesh. """
if self.__indices is not None:
raise ValueError('Indices are already set')
indices = np.asarray(indices, dtype=np.int32)
self.__nvertices = int(indices.max()) + 1
self.__indices = indices.reshape((-1, 3))
@property @property
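The net effect of making ``indices`` optional is that a ``Mesh`` can now be built up in stages. A small sketch of the intended usage, using a single made-up triangle::

    import numpy as np
    from fsl.data.mesh import Mesh

    verts = np.array([[0, 0, 0],
                      [1, 0, 0],
                      [0, 1, 0]], dtype=float)
    tris  = np.array([[0, 1, 2]])

    mesh         = Mesh()     # no indices at creation time
    mesh.indices = tris       # assigned later via the new setter
    mesh.addVertices(verts)   # 3 vertices, matching indices.max() + 1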
...@@ -291,7 +326,7 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -291,7 +326,7 @@ class Mesh(notifier.Notifier, meta.Meta):
""" """
selected = self.__selected selected = self.__selected
indices = self.__indices indices = self.__vindices[selected]
vertices = self.__vertices[selected] vertices = self.__vertices[selected]
fnormals = self.__faceNormals.get(selected, None) fnormals = self.__faceNormals.get(selected, None)
...@@ -309,7 +344,7 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -309,7 +344,7 @@ class Mesh(notifier.Notifier, meta.Meta):
""" """
selected = self.__selected selected = self.__selected
indices = self.__indices indices = self.__vindices[selected]
vertices = self.__vertices[selected] vertices = self.__vertices[selected]
vnormals = self.__vertNormals.get(selected, None) vnormals = self.__vertNormals.get(selected, None)
...@@ -327,11 +362,15 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -327,11 +362,15 @@ class Mesh(notifier.Notifier, meta.Meta):
``Mesh`` instance. The bounding box is arranged like so: ``Mesh`` instance. The bounding box is arranged like so:
``((xlow, ylow, zlow), (xhigh, yhigh, zhigh))`` ``((xlow, ylow, zlow), (xhigh, yhigh, zhigh))``
Returns ``None`` if indices or vertices have not yet been assigned.
""" """
if self.__indices is None or len(self.__vertices) == 0:
return None
lo = self.__loBounds[self.__selected] lo = self.__loBounds[self.__selected]
hi = self.__hiBounds[self.__selected] hi = self.__hiBounds[self.__selected]
return lo, hi return lo, hi
...@@ -380,10 +419,13 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -380,10 +419,13 @@ class Mesh(notifier.Notifier, meta.Meta):
:returns: The vertices, possibly reshaped :returns: The vertices, possibly reshaped
:raises: ``ValueError`` if the provided ``vertices`` array :raises: ``IncompatibleVerticesError`` if the provided
has the wrong number of vertices. ``vertices`` array has the wrong number of vertices.
""" """
if self.__indices is None:
raise ValueError('Mesh indices have not yet been set')
if key is None: if key is None:
key = 'default' key = 'default'
...@@ -399,32 +441,33 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -399,32 +441,33 @@ class Mesh(notifier.Notifier, meta.Meta):
# reshape raised an error - # reshape raised an error -
# wrong number of vertices # wrong number of vertices
except ValueError: except ValueError:
raise ValueError('{}: invalid number of vertices: ' raise IncompatibleVerticesError(
'{} != ({}, 3)'.format(key, f'{key}: invalid number of vertices: '
vertices.shape, f'{vertices.shape} != ({self.nvertices}, 3)')
self.nvertices))
self.__vertices[key] = vertices self.__vertices[key] = vertices
self.__vindices[key] = self.__indices
self.__loBounds[key] = lo self.__loBounds[key] = lo
self.__hiBounds[key] = hi self.__hiBounds[key] = hi
if select: if select:
self.vertices = key self.vertices = key
# indices already fixed? if fixWinding:
if fixWinding and (not self.__fixed): indices = self.__indices
indices = self.indices normals = calcFaceNormals(vertices, indices)
normals = self.normals needsFix = needsFixing(vertices, indices, normals, lo, hi)
needsFix = needsFixing(vertices, indices, normals, lo, hi)
self.__fixed = True
# See needsFixing documentation # See needsFixing documentation
if needsFix: if needsFix:
indices[:, [1, 2]] = indices[:, [2, 1]] if self.__fixedIndices is None:
self.__fixedIndices = indices[:, [0, 2, 1]]
for k, fn in self.__faceNormals.items(): self.__vindices[ key] = self.__fixedIndices
self.__faceNormals[k] = fn * -1 self.__faceNormals[key] = normals * -1
else:
self.__faceNormals[key] = normals
return vertices return vertices
...@@ -571,7 +614,7 @@ class Mesh(notifier.Notifier, meta.Meta): ...@@ -571,7 +614,7 @@ class Mesh(notifier.Notifier, meta.Meta):
# sort by ray. I'm not sure if this is # needed - does trimesh do it for us?
# needed - does trimesh do it for us? # needed - does trimesh do it for us?
rayIdxs = np.asarray(np.argsort(rays), np.int) rayIdxs = np.asarray(np.argsort(rays))
locs = locs[rayIdxs] locs = locs[rayIdxs]
tris = tris[rayIdxs] tris = tris[rayIdxs]
...@@ -683,9 +726,9 @@ def calcFaceNormals(vertices, indices): ...@@ -683,9 +726,9 @@ def calcFaceNormals(vertices, indices):
v2 = vertices[indices[:, 2]] v2 = vertices[indices[:, 2]]
fnormals = np.cross((v1 - v0), (v2 - v0)) fnormals = np.cross((v1 - v0), (v2 - v0))
fnormals = transform.normalise(fnormals) fnormals = affine.normalise(fnormals)
return fnormals return np.atleast_2d(fnormals)
def calcVertexNormals(vertices, indices, fnormals): def calcVertexNormals(vertices, indices, fnormals):
...@@ -699,7 +742,7 @@ def calcVertexNormals(vertices, indices, fnormals): ...@@ -699,7 +742,7 @@ def calcVertexNormals(vertices, indices, fnormals):
the mesh. the mesh.
""" """
vnormals = np.zeros((vertices.shape[0], 3), dtype=np.float) vnormals = np.zeros((vertices.shape[0], 3), dtype=float)
# TODO make fast. I can't figure # TODO make fast. I can't figure
# out how to use np.add.at to # out how to use np.add.at to
...@@ -714,7 +757,7 @@ def calcVertexNormals(vertices, indices, fnormals): ...@@ -714,7 +757,7 @@ def calcVertexNormals(vertices, indices, fnormals):
vnormals[v2, :] += fnormals[i] vnormals[v2, :] += fnormals[i]
# normalise to unit length # normalise to unit length
return transform.normalise(vnormals) return affine.normalise(vnormals)
def needsFixing(vertices, indices, fnormals, loBounds, hiBounds): def needsFixing(vertices, indices, fnormals, loBounds, hiBounds):
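Regarding the ``TODO make fast`` comment above: ``np.add.at`` can in fact vectorise this accumulation, because (unlike fancy-indexed ``+=``) it applies repeated indices correctly. A sketch of the equivalent computation, offered as a possible optimisation rather than as the fslpy implementation::

    import numpy as np

    def calc_vertex_normals(vertices, indices, fnormals):
        vnormals = np.zeros((vertices.shape[0], 3))

        # Accumulate each triangle's face normal onto
        # its three vertices, one index column at a time
        for col in range(3):
            np.add.at(vnormals, indices[:, col], fnormals)

        # Normalise to unit length, guarding against
        # vertices which are not used in any triangle
        norms             = np.linalg.norm(vnormals, axis=1, keepdims=True)
        norms[norms == 0] = 1
        return vnormals / norms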
...@@ -748,15 +791,19 @@ def needsFixing(vertices, indices, fnormals, loBounds, hiBounds): ...@@ -748,15 +791,19 @@ def needsFixing(vertices, indices, fnormals, loBounds, hiBounds):
ivert = np.argmin(dists) ivert = np.argmin(dists)
vert = vertices[ivert] vert = vertices[ivert]
# Pick a triangle that # Get all the triangles
# this vertex is in and # that this vertex is in
# get its face normal # and their face normals
itri = np.where(indices == ivert)[0][0] itris = np.where(indices == ivert)[0]
n = fnormals[itri, :] norms = fnormals[itris, :]
# Make sure the angle between the # Calculate the angle between each
# normal, and a vector from the # normal, and a vector from the
# vertex to the camera is positive # vertex to the camera. If more than
# If it isn't, we need to flip the # 50% of the angles are negative
# triangle winding order. # (== more than 90 degrees == the
return np.dot(n, transform.normalise(camera - vert)) < 0 # face is facing away from the
# camera), assume that we need to
# flip the triangle winding order.
angles = np.dot(norms, affine.normalise(camera - vert))
return ((angles >= 0).sum() / len(itris)) < 0.5
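The winding-order fix applied in ``addVertices`` (``indices[:, [0, 2, 1]]``) simply swaps the last two vertices of every triangle, which reverses each face normal. A worked example with one triangle in the x-y plane::

    import numpy as np

    v0, v1, v2 = np.array([[0., 0, 0],
                           [1., 0, 0],
                           [0., 1, 0]])
    tri = np.array([[0, 1, 2]])

    np.cross(v1 - v0, v2 - v0)   # [0, 0, 1]  - normal points towards +z

    flipped = tri[:, [0, 2, 1]]  # [[0, 2, 1]] - reversed winding order
    np.cross(v2 - v0, v1 - v0)   # [0, 0, -1] - the normal is negated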
...@@ -10,13 +10,14 @@ Freesurfer ``mgh``/``mgz`` image files. ...@@ -10,13 +10,14 @@ Freesurfer ``mgh``/``mgz`` image files.
import os.path as op import os.path as op
import pathlib
import six import numpy as np
import nibabel as nib import nibabel as nib
import fsl.utils.path as fslpath import fsl.utils.path as fslpath
import fsl.utils.transform as transform import fsl.transform.affine as affine
import fsl.data.image as fslimage import fsl.data.image as fslimage
ALLOWED_EXTENSIONS = ['.mgz', '.mgh'] ALLOWED_EXTENSIONS = ['.mgz', '.mgh']
...@@ -37,7 +38,7 @@ class MGHImage(fslimage.Image): ...@@ -37,7 +38,7 @@ class MGHImage(fslimage.Image):
- http://nipy.org/nibabel/reference/nibabel.freesurfer.html - http://nipy.org/nibabel/reference/nibabel.freesurfer.html
""" """
def __init__(self, image, *args, **kwargs): def __init__(self, image, **kwargs):
"""Create a ``MGHImage``. """Create a ``MGHImage``.
:arg image: Name of MGH file, or a :arg image: Name of MGH file, or a
...@@ -46,7 +47,7 @@ class MGHImage(fslimage.Image): ...@@ -46,7 +47,7 @@ class MGHImage(fslimage.Image):
All other arguments are passed through to :meth:`Image.__init__` All other arguments are passed through to :meth:`Image.__init__`
""" """
if isinstance(image, six.string_types): if isinstance(image, (str, pathlib.Path)):
filename = op.abspath(image) filename = op.abspath(image)
name = op.basename(filename) name = op.basename(filename)
image = nib.load(image) image = nib.load(image)
...@@ -54,23 +55,42 @@ class MGHImage(fslimage.Image): ...@@ -54,23 +55,42 @@ class MGHImage(fslimage.Image):
name = 'MGH image' name = 'MGH image'
filename = None filename = None
data = image.get_data() data = np.asanyarray(image.dataobj)
affine = image.affine xform = image.affine
pixdim = image.header.get_zooms()
vox2surf = image.header.get_vox2ras_tkr() vox2surf = image.header.get_vox2ras_tkr()
# the image may have an affine which
# transforms the data into some space
# with a scaling that is different to
# the pixdims. So we create a header
# object with both the affine and the
# pixdims, so they are both preserved.
#
# Note that we have to set the zooms
# after the s/qform, otherwise nibabel
# will clobber them with zooms gleaned
# from the affine.
header = nib.nifti1.Nifti1Header()
header.set_data_shape(data.shape)
header.set_sform(xform)
header.set_qform(xform)
header.set_zooms(pixdim)
fslimage.Image.__init__(self, fslimage.Image.__init__(self,
data, data,
xform=affine, header=header,
name=name, name=name,
dataSource=filename) dataSource=filename,
**kwargs)
if filename is not None: if filename is not None:
self.setMeta('mghImageFile', filename) self.setMeta('mghImageFile', filename)
self.__voxToSurfMat = vox2surf self.__voxToSurfMat = vox2surf
self.__surfToVoxMat = transform.invert(vox2surf) self.__surfToVoxMat = affine.invert(vox2surf)
self.__surfToWorldMat = transform.concat(affine, self.__surfToVoxMat) self.__surfToWorldMat = affine.concat(xform, self.__surfToVoxMat)
self.__worldToSurfMat = transform.invert(self.__surfToWorldMat) self.__worldToSurfMat = affine.invert(self.__surfToWorldMat)
def save(self, filename=None): def save(self, filename=None):
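The ordering constraint described in the comment above (zooms after s/qform) can be demonstrated in isolation. A sketch with made-up shape, pixdim and affine values::

    import numpy as np
    import nibabel as nib

    shape  = (4, 4, 4)
    pixdim = (1.0, 1.0, 2.5)                # differs from the affine scaling
    xform  = np.diag([2.0, 2.0, 2.0, 1.0])

    hdr = nib.nifti1.Nifti1Header()
    hdr.set_data_shape(shape)
    hdr.set_sform(xform)
    hdr.set_qform(xform)
    hdr.set_zooms(pixdim)   # must come last, or nibabel derives
                            # zooms of (2, 2, 2) from the affine

    assert tuple(hdr.get_zooms()) == pixdim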
...@@ -127,3 +147,30 @@ class MGHImage(fslimage.Image): ...@@ -127,3 +147,30 @@ class MGHImage(fslimage.Image):
coordinates into the surface coordinate system for this image. coordinates into the surface coordinate system for this image.
""" """
return self.__worldToSurfMat return self.__worldToSurfMat
def voxToSurfMat(img):
"""Generate an affine which can transform the voxel coordinates of
the given image into a corresponding Freesurfer surface coordinate
system (known as "Torig", or "vox2ras-tkr").
See https://surfer.nmr.mgh.harvard.edu/fswiki/CoordinateSystems
:arg img: An :class:`.Image` object.
:return: A ``(4, 4)`` matrix encoding an affine transformation from the
image voxel coordinate system to the corresponding Freesurfer
surface coordinate system.
"""
zooms = np.array(img.pixdim[:3])
dims = img.shape[ :3] * zooms / 2
xform = np.zeros((4, 4), dtype=np.float32)
xform[ 0, 0] = -zooms[0]
xform[ 1, 2] = zooms[2]
xform[ 2, 1] = -zooms[1]
xform[ 3, 3] = 1
xform[:3, 3] = [dims[0], -dims[2], dims[1]]
return xform
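Plugging in the canonical FreeSurfer conformed geometry (256^3 voxels at 1mm isotropic) reproduces the well-known ``Torig`` matrix, which makes for a useful sanity check::

    import numpy as np

    zooms = np.array([1., 1., 1.])
    dims  = np.array([256, 256, 256]) * zooms / 2   # (128, 128, 128)

    xform        = np.zeros((4, 4), dtype=np.float32)
    xform[ 0, 0] = -zooms[0]
    xform[ 1, 2] =  zooms[2]
    xform[ 2, 1] = -zooms[1]
    xform[ 3, 3] = 1
    xform[:3, 3] = [dims[0], -dims[2], dims[1]]

    # [[  -1.    0.    0.  128.]
    #  [   0.    0.    1. -128.]
    #  [   0.   -1.    0.  128.]
    #  [   0.    0.    0.    1.]]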
#!/usr/bin/env python
#
# utils.py - Miscellaneous utility functions
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
"""This module is home to some miscellaneous utility functions for working
with the data types defined in the :mod:`fsl.data` package.
"""
import os.path as op
import numpy as np
def guessType(path):
"""A convenience function which, given the name of a file or directory,
attempts to figure out a suitable data type.
Returns a tuple containing two values - a type which should be able to
load the path, and the path itself, possibly adjusted. If the type
is unrecognised, the first tuple value will be ``None``.
"""
import fsl.utils.path as fslpath
import fsl.data.image as fslimage
import fsl.data.vtk as fslvtk
import fsl.data.gifti as fslgifti
import fsl.data.freesurfer as fslfs
import fsl.data.mghimage as fslmgh
import fsl.data.bitmap as fslbmp
import fsl.data.featimage as featimage
import fsl.data.melodicimage as melimage
import fsl.data.dtifit as dtifit
import fsl.data.melodicanalysis as melanalysis
import fsl.data.featanalysis as featanalysis
# Support files opened via fsleyes:// URL
if path.startswith('fsleyes://'):
path = path[10:]
path = op.abspath(path)
# Accept images sans-extension
try:
path = fslimage.addExt(path, mustExist=True)
except fslimage.PathError:
pass
if op.isfile(path):
# Some types are easy - just check the extensions
if fslpath.hasExt(path.lower(), fslvtk.ALLOWED_EXTENSIONS):
return fslvtk.VTKMesh, path
elif fslpath.hasExt(path.lower(), fslgifti.ALLOWED_EXTENSIONS):
return fslgifti.GiftiMesh, path
elif fslfs.isGeometryFile(path):
return fslfs.FreesurferMesh, path
elif fslpath.hasExt(path.lower(), fslmgh.ALLOWED_EXTENSIONS):
return fslmgh.MGHImage, path
elif fslpath.hasExt(path.lower(), fslbmp.BITMAP_EXTENSIONS):
return fslbmp.Bitmap, path
# Other specialised image types
elif melanalysis .isMelodicImage(path):
return melimage.MelodicImage, path
elif featanalysis.isFEATImage( path):
return featimage.FEATImage, path
elif fslimage.looksLikeImage(path):
return fslimage.Image, path
# Analysis directory?
elif op.isdir(path):
if melanalysis.isMelodicDir(path):
return melimage.MelodicImage, path
elif featanalysis.isFEATDir(path):
return featimage.FEATImage, path
elif dtifit.isDTIFitPath(path):
return dtifit.DTIFitTensor, path
# Otherwise, I don't
# know what to do
return None, path
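A usage sketch for ``guessType`` - the paths are hypothetical, and each file or directory must exist for the corresponding check to fire::

    import fsl.data.utils as dutils

    # Extension-less image path - addExt resolves it first
    dtype, path = dutils.guessType('/data/subj1/T1')
    # -> (fsl.data.image.Image, '/data/subj1/T1.nii.gz')

    # fsleyes:// URLs are also accepted
    dtype, path = dutils.guessType('fsleyes:///data/subj1/analysis.feat')
    # -> (fsl.data.featimage.FEATImage, '/data/subj1/analysis.feat')

    if dtype is not None:
        overlay = dtype(path)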
def makeWriteable(array):
"""Updates the given ``numpy.array`` so that it is writeable. If this
is not possible, a copy is created and returned.
"""
try:
# Versions of numpy prior to 1.16 will
# happily mutate a bytes array, whcih
# is supposed to be immutable. So if
# is the case, let's force a copy.
if isinstance(array.base, bytes):
raise ValueError()
# In versions of numpy 1.16 and newer,
# setting the WRITEABLE flag on an
# immutable array will cause a
# ValueError to be raised
array.flags['WRITEABLE'] = True
except ValueError:
array = np.array(array)
return array
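For example, an array backed by an immutable ``bytes`` object cannot have its ``WRITEABLE`` flag restored, so ``makeWriteable`` falls back to copying. A sketch::

    import numpy as np
    import fsl.data.utils as dutils

    # frombuffer gives a read-only view onto the bytes object
    frozen = np.frombuffer(b'\x00\x01\x02\x03', dtype=np.uint8)

    thawed = dutils.makeWriteable(frozen)   # a writeable copy
    thawed[0] = 99                          # safe to mutate
    assert thawed is not frozen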
...@@ -11,9 +11,14 @@ ...@@ -11,9 +11,14 @@
looksLikeVestLutFile looksLikeVestLutFile
loadVestLutFile loadVestLutFile
loadVestFile
generateVest
""" """
import textwrap as tw
import io
import numpy as np import numpy as np
...@@ -22,7 +27,16 @@ def looksLikeVestLutFile(path): ...@@ -22,7 +27,16 @@ def looksLikeVestLutFile(path):
``False`` otherwise. ``False`` otherwise.
""" """
with open(path, 'rt') as f: with open(path, 'rt') as f:
return f.readline().strip() == '%!VEST-LUT'
lines = []
for i in range(10):
line = f.readline()
if line == '': break
else: lines.append(line.strip())
validHeaders = ('%!VEST-LUT', '%BeginInstance', '%%BeginInstance')
return len(lines) > 0 and lines[0] in validHeaders
def loadVestLutFile(path, normalise=True): def loadVestLutFile(path, normalise=True):
...@@ -67,3 +81,70 @@ def loadVestLutFile(path, normalise=True): ...@@ -67,3 +81,70 @@ def loadVestLutFile(path, normalise=True):
else: else:
return colours return colours
def loadVestFile(path, ignoreHeader=True):
"""Loads numeric data from a VEST file, returning it as a ``numpy`` array.
:arg ignoreHeader: if ``True`` (the default), the matrix shape specified
in the VEST header information is ignored, and the shape
inferred from the data. Otherwise, if the number of
rows/columns specified in the VEST header information
does not match the matrix shape, a ``ValueError`` is
raised.
:returns: a ``numpy`` array containing the matrix data in the
VEST file.
"""
data = np.loadtxt(path, comments=['#', '/'])
if not ignoreHeader:
nrows, ncols = None, None
with open(path, 'rt') as f:
for line in f:
if 'NumWaves' in line: ncols = int(line.split()[1])
elif 'NumPoints' in line: nrows = int(line.split()[1])
else: continue
if (ncols is not None) and (nrows is not None):
break
if tuple(data.shape) != (nrows, ncols):
raise ValueError(f'Invalid VEST file ({path}) - data shape '
f'({data.shape}) does not match header '
f'({nrows}, {ncols})')
return data
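A usage sketch for ``loadVestFile``, with a hypothetical FEAT design matrix::

    import fsl.data.vest as vest

    # With ignoreHeader=False, the data shape is validated against
    # the /NumPoints (rows) and /NumWaves (columns) header fields
    design = vest.loadVestFile('design.mat', ignoreHeader=False)
    print(design.shape)   # e.g. (200, 4) - one row per time point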
def generateVest(data):
"""Generates VEST-formatted text for the given ``numpy`` array.
:arg data: A 1D or 2D numpy array.
:returns: A string containing a VEST header, and the ``data``.
"""
data = np.asanyarray(data)
if len(data.shape) not in (1, 2):
raise ValueError(f'unsupported number of dimensions: {data.shape}')
data = np.atleast_2d(data)
if np.issubdtype(data.dtype, np.integer): fmt = '%d'
else: fmt = '%0.12f'
sdata = io.StringIO()
np.savetxt(sdata, data, fmt=fmt)
sdata = sdata.getvalue()
nrows, ncols = data.shape
vest = tw.dedent(f"""
/NumWaves {ncols}
/NumPoints {nrows}
/Matrix
""").strip() + '\n' + sdata
return vest.strip()
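And a round-trip sketch for ``generateVest``::

    import numpy as np
    import fsl.data.vest as vest

    data = np.array([[1, 2, 3],
                     [4, 5, 6]])

    print(vest.generateVest(data))
    # /NumWaves 3
    # /NumPoints 2
    # /Matrix
    # 1 2 3
    # 4 5 6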