diff --git a/fsl/data/cifti.py b/fsl/data/cifti.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1521d56f3c2fe798a8b8c5fc2830cc7c966c155
--- /dev/null
+++ b/fsl/data/cifti.py
@@ -0,0 +1,493 @@
+"""
+Provides a sparse representation of volumetric and/or surface data
+
+The data can be defined either per voxel/vertex (:class:`DenseCifti`) or per parcel (:class:`ParcelCifti`).
+
+The data can be read from NIFTI, GIFTI, or CIFTI files.
+Non-sparse volumetric or surface representations can be extracted.
+"""
+from nibabel.cifti2 import cifti2_axes
+from typing import Sequence, Optional, Union
+import numpy as np
+from fsl.data import image
+import nibabel as nib
+from fsl.utils.path import addExt
+
+
+dense_extensions = {
+    cifti2_axes.BrainModelAxis: '.dconn.nii',
+    cifti2_axes.ParcelsAxis: '.dpconn.nii',
+    cifti2_axes.SeriesAxis: '.dtseries.nii',
+    cifti2_axes.ScalarAxis: '.dscalar.nii',
+    cifti2_axes.LabelAxis: '.dlabel.nii',
+}
+
+parcel_extensions = {
+    cifti2_axes.BrainModelAxis: '.pdconn.nii',
+    cifti2_axes.ParcelsAxis: '.pconn.nii',
+    cifti2_axes.SeriesAxis: '.ptseries.nii',
+    cifti2_axes.ScalarAxis: '.pscalar.nii',
+    cifti2_axes.LabelAxis: '.plabel.nii',
+}
+
+
+class Cifti:
+    """
+    Parent class for the two types of CIFTI files.
+
+    The type of the CIFTI file is determined by the last axis, which can be one of:
+
+    - :py:class:`BrainModelAxis <cifti2_axes.BrainModelAxis>`
+    - :py:class:`ParcelsAxis <cifti2_axes.ParcelsAxis>`
+    """
+    def __init__(self, arr: np.ndarray, axes: Sequence[Optional[cifti2_axes.Axis]]):
+        """
+        Defines a new dataset in greyordinate space
+
+        :param arr: (..., N) array for N greyordinates or parcels
+        :param axes: sequence of CIFTI axes describing the data along each dimension; can contain Nones for undefined axes
+        """
+        self.arr = arr
+        axes = tuple(axes)
+        while self.arr.ndim > len(axes):
+            axes = (None, ) + axes
+        self.axes = axes
+        if not all(ax is None or len(ax) == sz for ax, sz in zip(axes, self.arr.shape)):
+            raise ValueError(f"Shape of axes {tuple(-1 if ax is None else len(ax) for ax in axes)} does not "
+                             f"match shape of array {self.arr.shape}")
+
+    def to_cifti(self, default_axis=None):
+        """
+        Create a CIFTI image from the data
+
+        :param default_axis: What to use as an axis along any undefined dimensions
+
+            - By default an error is raised
+            - if set to "scalar" a ScalarAxis is used with names of "default {index}"
+            - if set to "series" a SeriesAxis is used
+
+        :return: nibabel CIFTI image
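+
+        Example (illustrative; assumes ``dc`` wraps a (T, N) array with ``axes=[None, brain_model_axis]``)::
+
+            img = dc.to_cifti(default_axis='series')  # the undefined leading axis becomes a SeriesAxis
+            img.to_filename('data.dtseries.nii')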
+        """
+        if any(ax is None for ax in self.axes):
+            if default_axis is None:
+                raise ValueError("Can not store to CIFTI without defining what is stored along each dimension")
+            elif default_axis == 'scalar':
+                def get_axis(n: int):
+                    return cifti2_axes.ScalarAxis([f'default {idx + 1}' for idx in range(n)])
+            elif default_axis == 'series':
+                def get_axis(n: int):
+                    return cifti2_axes.SeriesAxis(0, 1, n)
+            else:
+                raise ValueError(f"default_axis should be set to None, 'scalar', or 'series', not {default_axis}")
+            new_axes = [
+                get_axis(sz) if ax is None else ax
+                for ax, sz in zip(self.axes, self.arr.shape)
+            ]
+        else:
+            new_axes = list(self.axes)
+
+        data = self.arr
+        if data.ndim == 1:
+            # CIFTI axes are always at least 2D
+            data = data[None, :]
+            new_axes.insert(0, cifti2_axes.ScalarAxis(['default']))
+
+        return nib.Cifti2Image(data, header=new_axes)
+
+    @classmethod
+    def from_cifti(cls, filename, writable=False):
+        """
+        Creates a new greyordinate object from a dense CIFTI file
+
+        :param filename: CIFTI filename or :class:`nib.Cifti2Image` object
+        :param writable: if True, opens the data array in writable (memory-mapped) mode
+        """
+        if isinstance(filename, str):
+            img = nib.load(filename)
+        else:
+            img = filename
+
+        if not isinstance(img, nib.Cifti2Image):
+            raise ValueError(f"Input {filename} should be CIFTI filename or nibabel Cifti2Image")
+
+        if writable:
+            data = np.memmap(filename, img.dataobj.dtype, mode='r+',
+                             offset=img.dataobj.offset, shape=img.shape, order='F')
+        else:
+            data = np.asanyarray(img.dataobj)
+
+        axes = [img.header.get_axis(idx) for idx in range(data.ndim)]
+
+        if isinstance(axes[-1], cifti2_axes.BrainModelAxis):
+            return DenseCifti(data, axes)
+        elif isinstance(axes[-1], cifti2_axes.ParcelsAxis):
+            return ParcelCifti(data, axes)
+        raise ValueError("Last axis of CIFTI object should be a BrainModelAxis or ParcelsAxis")
+
+    def save(self, cifti_filename, default_axis=None):
+        """
+        Writes this sparse representation to a CIFTI file
+
+        :param cifti_filename: output filename
+        :param default_axis: What to use as an axis along any undefined dimensions
+
+            - By default an error is raised
+            - if set to "scalar" a ScalarAxis is used with names of "default {index}"
+            - if set to "series" a SeriesAxis is used
+        """
+        self.to_cifti(default_axis).to_filename(addExt(cifti_filename, defaultExt=self.extension, mustExist=False))
+
+    @classmethod
+    def from_gifti(cls, filename, mask_values=(0, np.nan)):
+        """
+        Creates a new greyordinate object from a GIFTI file
+
+        :param filename: GIFTI filename
+        :param mask_values: values to mask out
+        :return: greyordinate object representing the unmasked vertices
+        """
+        if isinstance(filename, str):
+            img = nib.load(filename)
+        else:
+            img = filename
+        datasets = [darr.data for darr in img.darrays]
+        if len(datasets) == 1:
+            data = datasets[0]
+        else:
+            data = np.concatenate(
+                [np.atleast_2d(d) for d in datasets], axis=0
+            )
+        mask = np.ones(data.shape, dtype='bool')
+        for value in mask_values:
+            if value is np.nan:
+                mask &= ~np.isnan(data)
+            else:
+                mask &= ~(data == value)
+        while mask.ndim > 1:
+            mask = mask.any(0)
+
+        anatomy = BrainStructure.from_gifti(img)
+
+        bm_axes = cifti2_axes.BrainModelAxis.from_mask(mask, name=anatomy.cifti)
+        return DenseCifti(data[..., mask], [bm_axes])
+
+    @classmethod
+    def from_image(cls, input, mask_values=(np.nan, 0)):
+        """
+        Creates a new greyordinate object from a NIFTI file
+
+        :param input: FSL :class:`image.Image` object
+        :param mask_values: which values to mask out
+        :return: greyordinate object representing the unmasked voxels
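+
+        Example (illustrative; ``'mean_func.nii.gz'`` is a hypothetical filename)::
+
+            dc = Cifti.from_image('mean_func.nii.gz')
+            vol = dc.to_image(fill=0)  # round-trip back to a volumetric image.Image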
+        """
+        img = image.Image(input)
+
+        mask = np.ones(img.data.shape, dtype='bool')
+        for value in mask_values:
+            if value is np.nan:
+                mask &= ~np.isnan(img.data)
+            else:
+                mask &= ~(img.data == value)
+        while mask.ndim > 3:
+            mask = mask.any(-1)
+        if np.sum(mask) == 0:
+            raise ValueError("No unmasked voxels found in NIFTI image")
+
+        inverted_data = np.transpose(img.data[mask], tuple(range(1, img.data.ndim - 2)) + (0, ))
+        bm_axes = cifti2_axes.BrainModelAxis.from_mask(mask, affine=img.nibImage.affine)
+        return DenseCifti(inverted_data, [bm_axes])
+
+
+class DenseCifti(Cifti):
+    """
+    Represents sparse data defined for a subset of voxels and vertices (i.e., greyordinates)
+    """
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if not isinstance(self.brain_model_axis, cifti2_axes.BrainModelAxis):
+            raise ValueError(f"DenseCifti expects a BrainModelAxis as last axes object, not {type(self.brain_model_axis)}")
+
+    @property
+    def brain_model_axis(self, ) -> cifti2_axes.BrainModelAxis:
+        return self.axes[-1]
+
+    @property
+    def extension(self, ):
+        if self.arr.ndim == 1:
+            return dense_extensions[cifti2_axes.ScalarAxis]
+        return dense_extensions[type(self.axes[-2])]
+
+    def to_image(self, fill=0) -> image.Image:
+        """
+        Get the volumetric data as an :class:`image.Image`
+        """
+        if self.brain_model_axis.volume_mask.sum() == 0:
+            raise ValueError(f"Can not create volume without voxels in {self}")
+        data = np.full(self.brain_model_axis.volume_shape + self.arr.shape[:-1], fill,
+                       dtype=self.arr.dtype)
+        voxels = self.brain_model_axis.voxel[self.brain_model_axis.volume_mask]
+        data[tuple(voxels.T)] = np.transpose(self.arr, (-1,) + tuple(range(self.arr.ndim - 1)))[
+            self.brain_model_axis.volume_mask]
+        return image.Image(data, xform=self.brain_model_axis.affine)
+
+    def surface(self, anatomy, fill=np.nan, partial=False):
+        """
+        Gets a specific surface
+
+        If `partial` is True a view of the data rather than a copy is returned.
+
+        :param anatomy: BrainStructure or string like 'CortexLeft' or 'CortexRight'
+        :param fill: which value to fill the array with if not all vertices are defined
+        :param partial: only return the part of the surface defined in the greyordinate file (ignores `fill` if set)
+        :return:
+            - if not partial: (..., n_vertices) array
+            - if partial: tuple of an (N, ) integer array with the vertex indices covered and the corresponding (..., N) data array
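+
+        Example (illustrative; assumes ``dc`` contains left-cortex data)::
+
+            vertices, values = dc.surface('CortexLeft', partial=True)
+            full = dc.surface('CortexLeft')  # (..., n_vertices) array, missing vertices set to ``fill``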
+        """
+        if isinstance(anatomy, str):
+            anatomy = BrainStructure.from_string(anatomy, issurface=True)
+        if anatomy.cifti not in self.brain_model_axis.name:
+            raise ValueError(f"No surface data for {anatomy.cifti} found")
+        slc, bm = None, None
+        arr = np.full(self.arr.shape[:-1] + (self.brain_model_axis.nvertices[anatomy.cifti],), fill,
+                      dtype=self.arr.dtype)
+        for name, slc_try, bm_try in self.brain_model_axis.iter_structures():
+            if name == anatomy.cifti:
+                if partial:
+                    if bm is not None:
+                        raise ValueError(f"Surface {anatomy} does not form a contiguous block")
+                    slc, bm = slc_try, bm_try
+                else:
+                    arr[..., bm_try.vertex] = self.arr[..., slc_try]
+        if not partial:
+            return arr
+        else:
+            return bm.vertex, self.arr[..., slc]
+
+
+class ParcelCifti(Cifti):
+    """
+    Represents sparse data defined at specific parcels
+    """
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if not isinstance(self.parcel_axis, cifti2_axes.ParcelsAxis):
+            raise ValueError(f"ParcelCifti expects a ParcelsAxis as last axes object, not {type(self.parcel_axis)}")
+
+    @property
+    def extension(self, ):
+        if self.arr.ndim == 1:
+            return parcel_extensions[cifti2_axes.ScalarAxis]
+        return parcel_extensions[type(self.axes[-2])]
+
+    @property
+    def parcel_axis(self, ) -> cifti2_axes.ParcelsAxis:
+        return self.axes[-1]
+
+    def to_image(self, fill=0):
+        """
+        Get the volumetric data as an :class:`image.Image`
+        """
+        data = np.full(self.parcel_axis.volume_shape + self.arr.shape[:-1], fill, dtype=self.arr.dtype)
+        written = np.zeros(self.parcel_axis.volume_shape, dtype='bool')
+        for idx, write_to in enumerate(self.parcel_axis.voxels):
+            if written[tuple(write_to.T)].any():
+                raise ValueError("Duplicate voxels in different parcels")
+            data[tuple(write_to.T)] = self.arr[np.newaxis, ..., idx]
+            written[tuple(write_to.T)] = True
+        if not written.any():
+            raise ValueError("Parcellation does not contain any volumetric data")
+        return image.Image(data, xform=self.parcel_axis.affine)
+
+    def surface(self, anatomy, fill=np.nan, partial=False):
+        """
+        Gets a specific surface
+
+        :param anatomy: BrainStructure or string like 'CortexLeft' or 'CortexRight'
+        :param fill: which value to fill the array with if not all vertices are defined
+        :param partial: only return the part of the surface defined in the greyordinate file (ignores `fill` if set)
+        :return:
+            - if not partial: (..., n_vertices) array
+            - if partial: tuple of an (N, ) integer array with the vertex indices covered and the corresponding (..., N) data array
+        """
+        if isinstance(anatomy, str):
+            anatomy = BrainStructure.from_string(anatomy, issurface=True)
+        if anatomy.cifti not in self.parcel_axis.nvertices:
+            raise ValueError(f"No surface data for {anatomy.cifti} found")
+
+        arr = np.full(self.arr.shape[:-1] + (self.parcel_axis.nvertices[anatomy.cifti],), fill,
+                      dtype=self.arr.dtype)
+        written = np.zeros(self.parcel_axis.nvertices[anatomy.cifti], dtype='bool')
+        for idx, vertices in enumerate(self.parcel_axis.vertices):
+            if anatomy.cifti not in vertices:
+                continue
+            write_to = vertices[anatomy.cifti]
+            if written[write_to].any():
+                raise ValueError("Duplicate vertices in different parcels")
+            arr[..., write_to] = self.arr[..., idx, np.newaxis]
+            written[write_to] = True
+
+        if not partial:
+            return arr
+        else:
+            return np.where(written)[0], arr[..., written]
+
+
+class BrainStructure(object):
+    """Which brain structure does the parent object describe?
+
+    Supports how brain structures are stored in both GIFTI and CIFTI files
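+
+    Example (illustrative)::
+
+        BrainStructure('cortex', hemisphere='left').cifti  # 'CIFTI_STRUCTURE_CORTEX_LEFT'
+        BrainStructure.from_string('CortexLeft').gifti     # {'AnatomicalStructurePrimary': 'CortexLeft'}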
+    """
+    def __init__(self, primary, secondary=None, hemisphere='both', geometry=None):
+        """Creates a new brain structure
+
+        :param primary: Name of the brain structure (e.g. cortex, thalamus)
+        :param secondary: Further specification of which part of the brain structure is described (e.g. 'white' or
+            'pial' for the cortex)
+        :param hemisphere: which hemisphere is the brain structure in ('left', 'right', or 'both')
+        :param geometry: does the parent object describe the 'volume' or the 'surface'
+        """
+        self.primary = primary.lower()
+        self.secondary = None if secondary is None else secondary.lower()
+        self.hemisphere = hemisphere.lower()
+        if geometry not in (None, 'surface', 'volume'):
+            raise ValueError(f"Invalid value for geometry: {geometry}")
+        self.geometry = geometry
+
+    def __eq__(self, other):
+        """Two brain structures are equal if they could describe the same structure
+        """
+        if isinstance(other, str):
+            other = self.from_string(other)
+        match_primary = (self.primary == other.primary or self.primary == 'all' or other.primary == 'all' or
+                         self.primary == other.geometry or self.geometry == other.primary)
+        match_hemisphere = self.hemisphere == other.hemisphere
+        match_secondary = (self.secondary is None or other.secondary is None or self.secondary == other.secondary)
+        match_geometry = (self.geometry is None or other.geometry is None or self.geometry == other.geometry)
+        return match_primary and match_hemisphere and match_secondary and match_geometry
+
+    @property
+    def gifti(self, ):
+        """Returns the keywords needed to define the surface in the meta information of a GIFTI file
+        """
+        main = self.primary.capitalize() + ('' if self.hemisphere == 'both' else self.hemisphere.capitalize())
+        res = {'AnatomicalStructurePrimary': main}
+        if self.secondary is not None:
+            res['AnatomicalStructureSecondary'] = self.secondary.capitalize()
+        return res
+
+    def __str__(self, ):
+        """Returns a short description of the brain structure
+        """
+        if self.secondary is None:
+            return self.primary.capitalize() + self.hemisphere.capitalize()
+        else:
+            return "%s%s(%s)" % (self.primary.capitalize(), self.hemisphere.capitalize(), self.secondary)
+
+    @property
+    def cifti(self, ):
+        """Returns a description of the brain structure needed to define the surface in a CIFTI file
+        """
+        return 'CIFTI_STRUCTURE_' + self.primary.upper() + ('' if self.hemisphere == 'both' else ('_' + self.hemisphere.upper()))
+
+    @classmethod
+    def from_string(cls, value, issurface=None):
+        """Parses a string to find out which brain structure is being described
+
+        :param value: string to be parsed
+        :param issurface: defines whether the object describes the volume or surface of the brain structure (default: surface if the brain structure is the cortex, volume otherwise)
+        """
+        if '_' in value:
+            items = [val.lower() for val in value.split('_')]
+            if items[-1] in ['left', 'right', 'both']:
+                hemisphere = items[-1]
+                others = items[:-1]
+            elif items[0] in ['left', 'right', 'both']:
+                hemisphere = items[0]
+                others = items[1:]
+            else:
+                hemisphere = 'both'
+                others = items
+            if others[0] in ['nifti', 'cifti', 'gifti']:
+                others = others[2:]
+            primary = '_'.join(others)
+        else:
+            low = value.lower()
+            if 'left' == low[-4:]:
+                hemisphere = 'left'
+                primary = low[:-4]
+            elif 'right' == low[-5:]:
+                hemisphere = 'right'
+                primary = low[:-5]
+            elif 'both' == low[-4:]:
+                hemisphere = 'both'
+                primary = low[:-4]
+            else:
+                hemisphere = 'both'
+                primary = low
+        if issurface is None:
+            issurface = primary == 'cortex'
+        if primary == '':
+            primary = 'all'
+        return cls(primary, None, hemisphere, 'surface' if issurface else 'volume')
+
+    @classmethod
+    def from_gifti(cls, gifti_obj):
+        """
+        Extracts the brain structure from a GIFTI object
+        """
+        primary_str = 'AnatomicalStructurePrimary'
+        secondary_str = 'AnatomicalStructureSecondary'
+        primary = "other"
+        secondary = None
+        for meta in [gifti_obj] + gifti_obj.darrays:
+            if primary_str in meta.meta.metadata:
+                primary = meta.meta.metadata[primary_str]
+            if secondary_str in meta.meta.metadata:
+                secondary = meta.meta.metadata[secondary_str]
+        anatomy = cls.from_string(primary, issurface=True)
+        anatomy.secondary = None if secondary is None else secondary.lower()
+        return anatomy
+
+
+def load(filename, mask_values=(0, np.nan), writable=False) -> Union[DenseCifti, ParcelCifti]:
+    """
+    Reads CIFTI data from the given file
+
+    File can be:
+
+    - NIFTI file
+    - GIFTI file
+    - CIFTI file
+
+    :param filename: input filename
+    :param mask_values: which values are outside of the mask for NIFTI or GIFTI input
+    :param writable: if True, opens the CIFTI data in writable (memory-mapped) mode; not supported for NIFTI or GIFTI input
+    :return: appropriate CIFTI sub-class (parcellated or dense)
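+
+    Example (illustrative; filenames are hypothetical)::
+
+        dense = load('sub-01.dscalar.nii')    # -> DenseCifti
+        parcel = load('sub-01.pscalar.nii')   # -> ParcelCifti
+        vol = load('mask.nii.gz')             # NIFTI input also gives a DenseCifti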
+    """
+    possible_extensions = (
+            tuple(dense_extensions.values()) +
+            tuple(parcel_extensions.values()) +
+            tuple(image.ALLOWED_EXTENSIONS) +
+            ('.shape.gii', '.gii')
+    )
+    if isinstance(filename, str):
+        filename = addExt(filename, possible_extensions, fileGroups=image.FILE_GROUPS)
+        img = nib.load(filename)
+    else:
+        img = filename
+
+    if isinstance(img, nib.Cifti2Image):
+        return Cifti.from_cifti(img, writable=writable)
+    if isinstance(img, nib.GiftiImage):
+        if writable:
+            raise ValueError("Can not open GIFTI file in writable mode")
+        return Cifti.from_gifti(img, mask_values)
+    try:
+        vol_img = image.Image(img)
+    except ValueError:
+        raise ValueError(f"I do not know how to convert {type(img)} into greyordinates (from {filename})")
+    if writable:
+        raise ValueError("Can not open NIFTI file in writable mode")
+    return Cifti.from_image(vol_img, mask_values)
+
diff --git a/tests/test_cifti.py b/tests/test_cifti.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0b758f2bd1ff6baec7c9eece4023795583e5340
--- /dev/null
+++ b/tests/test_cifti.py
@@ -0,0 +1,264 @@
+from fsl.data import cifti
+import os.path as op
+import numpy as np
+import nibabel as nib
+from numpy import testing
+import tests
+from nibabel.cifti2 import cifti2_axes
+
+
+def volumetric_brain_model():
+    mask = np.random.randint(2, size=(10, 10, 10)) > 0
+    return cifti2_axes.BrainModelAxis.from_mask(mask, affine=np.eye(4))
+
+
+def surface_brain_model():
+    mask = np.random.randint(2, size=100) > 0
+    return cifti2_axes.BrainModelAxis.from_mask(mask, name='cortex')
+
+
+def volumetric_parcels(return_mask=False):
+    mask = np.random.randint(5, size=(10, 10, 10))
+    axis = cifti2_axes.ParcelsAxis(
+        [f'vol_{idx}' for idx in range(1, 5)],
+        voxels=[np.stack(np.where(mask == idx), axis=-1) for idx in range(1, 5)],
+        vertices=[{} for _ in range(1, 5)],
+        volume_shape=mask.shape,
+        affine=np.eye(4),
+    )
+    if return_mask:
+        return axis, mask
+    else:
+        return axis
+
+
+def surface_parcels(return_mask=False):
+    mask = np.random.randint(5, size=100)
+    axis = cifti2_axes.ParcelsAxis(
+        [f'surf_{idx}' for idx in range(1, 5)],
+        voxels=[np.zeros((0, 3), dtype=int) for _ in range(1, 5)],
+        vertices=[{'CIFTI_STRUCTURE_CORTEX': np.where(mask == idx)[0]} for idx in range(1, 5)],
+        nvertices={'CIFTI_STRUCTURE_CORTEX': 100},
+    )
+    if return_mask:
+        return axis, mask
+    else:
+        return axis
+
+
+def gen_data(axes):
+    return np.random.randn(*(5 if ax is None else len(ax) for ax in axes))
+
+
+def test_read_gifti():
+    testdir = op.join(op.dirname(__file__), 'testdata')
+
+    shapefile = op.join(testdir, 'example.shape.gii')
+    ref_data = nib.load(shapefile).darrays[0].data
+
+    data = cifti.load(shapefile)
+    assert isinstance(data, cifti.DenseCifti)
+    assert data.arr.shape == (642, )
+    testing.assert_equal(data.arr, ref_data)
+    testing.assert_equal(data.brain_model_axis.vertex, np.arange(642))
+    assert len(data.brain_model_axis.nvertices) == 1
+    assert data.brain_model_axis.nvertices['CIFTI_STRUCTURE_OTHER'] == 642
+
+    data = cifti.load(shapefile, mask_values=(ref_data[0], ))
+    assert isinstance(data, cifti.DenseCifti)
+    assert data.arr.shape == (np.sum(ref_data != ref_data[0]), )
+    testing.assert_equal(data.arr, ref_data[ref_data != ref_data[0]])
+    testing.assert_equal(data.brain_model_axis.vertex, np.where(ref_data != ref_data[0])[0])
+    assert len(data.brain_model_axis.nvertices) == 1
+    assert data.brain_model_axis.nvertices['CIFTI_STRUCTURE_OTHER'] == 642
+
+    cifti.load(op.join(testdir, 'example'))
+
+
+def test_read_nifti():
+    mask = np.random.randint(2, size=(10, 10, 10)) > 0
+    values = np.random.randn(10, 10, 10)
+    for mask_val in (0, np.nan):
+        values[~mask] = mask_val
+        affine = np.concatenate((np.random.randn(3, 4), np.array([0, 0, 0, 1])[None, :]), axis=0)
+        with tests.testdir():
+            nib.Nifti1Image(values, affine).to_filename("masked_image.nii.gz")
+            data = cifti.load("masked_image")
+            assert isinstance(data, cifti.DenseCifti)
+            testing.assert_equal(data.arr, values[mask])
+            testing.assert_allclose(data.brain_model_axis.affine, affine)
+            assert len(data.brain_model_axis.nvertices) == 0
+
+
+def check_io(data: cifti.Cifti, extension):
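+    # round-trip helper: saves ``data`` with the expected extension, reloads it, and
+    # checks that array and axes survive (a 1D array gains a leading scalar axis on save)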
+    with tests.testdir():
+        data.save("test")
+        assert op.isfile(f'test.{extension}.nii')
+        loaded = cifti.load("test")
+        if data.arr.ndim == 1:
+            testing.assert_equal(data.arr, loaded.arr[0])
+            assert data.axes == loaded.axes[1:]
+        else:
+            testing.assert_equal(data.arr, loaded.arr)
+            assert data.axes == loaded.axes
+
+
+def test_io_cifti():
+    for cifti_class, cifti_type, main_axis_options in (
+            (cifti.DenseCifti, 'd', (volumetric_brain_model(), surface_brain_model(),
+                                     volumetric_brain_model() + surface_brain_model())),
+            (cifti.ParcelCifti, 'p', (volumetric_parcels(), surface_parcels(),
+                                      volumetric_parcels() + surface_parcels())),
+    ):
+        for main_axis in main_axis_options:
+            with tests.testdir():
+                data_1d = cifti_class(gen_data([main_axis]), [main_axis])
+                check_io(data_1d, f'{cifti_type}scalar')
+
+                connectome = cifti_class(gen_data([main_axis, main_axis]), (main_axis, main_axis))
+                check_io(connectome, f'{cifti_type}conn')
+
+                scalar_axis = cifti2_axes.ScalarAxis(['A', 'B', 'C'])
+                scalar = cifti_class(gen_data([scalar_axis, main_axis]), (scalar_axis, main_axis))
+                check_io(scalar, f'{cifti_type}scalar')
+
+                label_axis = cifti2_axes.LabelAxis(['A', 'B', 'C'], {1: ('some parcel', (1, 0, 0, 1))})
+                label = cifti_class(gen_data([label_axis, main_axis]), (label_axis, main_axis))
+                check_io(label, f'{cifti_type}label')
+
+                series_axis = cifti2_axes.SeriesAxis(10, 3, 50, unit='HERTZ')
+                series = cifti_class(gen_data([series_axis, main_axis]), (series_axis, main_axis))
+                check_io(series, f'{cifti_type}tseries')
+
+                if cifti_type == 'd':
+                    parcel_axis = surface_parcels()
+                    dpconn = cifti_class(gen_data([parcel_axis, main_axis]), (parcel_axis, main_axis))
+                    check_io(dpconn, 'dpconn')
+                else:
+                    dense_axis = surface_brain_model()
+                    pdconn = cifti_class(gen_data([dense_axis, main_axis]), (dense_axis, main_axis))
+                    check_io(pdconn, 'pdconn')
+
+
+def test_extract_dense():
+    vol_bm = volumetric_brain_model()
+    surf_bm = surface_brain_model()
+    for bm in (vol_bm + surf_bm, surf_bm + vol_bm):
+        for ndim, no_other_axis in ((1, True), (2, False), (2, True)):
+            if ndim == 1:
+                data = cifti.DenseCifti(gen_data([bm]), [bm])
+            else:
+                scl = cifti2_axes.ScalarAxis(['A', 'B', 'C'])
+                data = cifti.DenseCifti(gen_data([scl, bm]),
+                                        [None if no_other_axis else scl, bm])
+
+            # extract volume
+            ref_arr = data.arr[..., data.brain_model_axis.volume_mask]
+            vol_image = data.to_image(fill=np.nan)
+            if ndim == 1:
+                assert vol_image.shape == data.brain_model_axis.volume_shape
+            else:
+                assert vol_image.shape == data.brain_model_axis.volume_shape + (3, )
+            assert np.isfinite(vol_image.data).sum() == len(vol_bm) * (3 if ndim == 2 else 1)
+            testing.assert_equal(vol_image.data[tuple(vol_bm.voxel.T)], ref_arr.T)
+
+            from_image = cifti.DenseCifti.from_image(vol_image)
+            assert from_image.brain_model_axis == vol_bm
+            testing.assert_equal(from_image.arr, ref_arr)
+
+            # extract surface
+            ref_arr = data.arr[..., data.brain_model_axis.surface_mask]
+            mask, surf_data = data.surface('cortex', partial=True)
+            assert surf_data.shape[-1] < 100
+            testing.assert_equal(ref_arr, surf_data)
+            testing.assert_equal(surf_bm.vertex, mask)
+
+            surf_data_full = data.surface('cortex', fill=np.nan)
+            assert surf_data_full.shape[-1] == 100
+            mask_full = np.isfinite(surf_data_full)
+            if ndim == 2:
+                assert (mask_full.any(0) == mask_full.all(0)).all()
+                mask_full = mask_full[0]
+            assert mask_full.sum() == len(surf_bm)
+            assert mask_full[..., mask].sum() == len(surf_bm)
+            testing.assert_equal(surf_data_full[..., mask_full], ref_arr)
+
+
+def test_extract_parcel():
+    vol_parcel, vol_mask = volumetric_parcels(return_mask=True)
+    surf_parcel, surf_mask = surface_parcels(return_mask=True)
+    parcel = vol_parcel + surf_parcel
+    for ndim, no_other_axis in ((1, True), (2, False), (2, True)):
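+        # the combined axis holds the 4 volumetric parcels first (columns 0-3),
+        # then the 4 surface parcels (columns 4-7); the idx offsets below rely on this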
+        if ndim == 1:
+            data = cifti.ParcelCifti(gen_data([parcel]), [parcel])
+        else:
+            scl = cifti2_axes.ScalarAxis(['A', 'B', 'C'])
+            data = cifti.ParcelCifti(gen_data([scl, parcel]),
+                                     [None if no_other_axis else scl, parcel])
+
+        # extract volume
+        vol_image = data.to_image(fill=np.nan)
+        if ndim == 1:
+            assert vol_image.shape == data.parcel_axis.volume_shape
+        else:
+            assert vol_image.shape == data.parcel_axis.volume_shape + (3, )
+        assert np.isfinite(vol_image.data).sum() == np.sum(vol_mask != 0) * (3 if ndim == 2 else 1)
+        if ndim == 1:
+            testing.assert_equal(vol_mask != 0, np.isfinite(vol_image.data))
+            for idx in range(1, 5):
+                testing.assert_allclose(vol_image.data[vol_mask == idx], data.arr[..., idx - 1])
+        else:
+            for idx in range(3):
+                testing.assert_equal(vol_mask != 0, np.isfinite(vol_image.data[..., idx]))
+                for idx2 in range(1, 5):
+                    testing.assert_allclose(vol_image.data[vol_mask == idx2, idx], data.arr[idx, idx2 - 1])
+
+        # extract surface
+        mask, surf_data = data.surface('cortex', partial=True)
+        assert surf_data.shape[-1] == (surf_mask != 0).sum()
+        assert (surf_mask[mask] != 0).all()
+        print(data.arr)
+        for idx in range(1, 5):
+            if ndim == 1:
+                testing.assert_equal(surf_data.T[surf_mask[mask] == idx], data.arr[idx + 3])
+            else:
+                for idx2 in range(3):
+                    testing.assert_equal(surf_data.T[surf_mask[mask] == idx, idx2], data.arr[idx2, idx + 3])
+
+        surf_data_full = data.surface('cortex', partial=False)
+        assert surf_data_full.shape[-1] == 100
+        if ndim == 1:
+            testing.assert_equal(np.isfinite(surf_data_full), surf_mask != 0)
+            for idx in range(1, 5):
+                testing.assert_equal(surf_data_full.T[surf_mask == idx], data.arr[idx + 3])
+        else:
+            for idx2 in range(3):
+                testing.assert_equal(np.isfinite(surf_data_full)[idx2], (surf_mask != 0))
+                for idx in range(1, 5):
+                    testing.assert_equal(surf_data_full.T[surf_mask == idx, idx2], data.arr[idx2, idx + 3])
+
+
+def test_brainstructure():
+    for primary in ['cortex', 'cerebellum']:
+        for secondary in [None, 'white', 'pial']:
+            for gtype in [None, 'volume', 'surface']:
+                for orientation in ['left', 'right', 'both']:
+                    bst = cifti.BrainStructure(primary, secondary, orientation, gtype)
+                    print(bst.cifti)
+                    assert bst.cifti == 'CIFTI_STRUCTURE_%s%s' % (primary.upper(), '' if orientation == 'both' else '_' + orientation.upper())
+                    assert bst.gifti['AnatomicalStructurePrimary'][:len(primary)] == primary.capitalize()
+                    assert len(bst.gifti) == (1 if secondary is None else 2)
+                    if secondary is not None:
+                        assert bst.gifti['AnatomicalStructureSecondary'] == secondary.capitalize()
+                    assert bst == cifti.BrainStructure(primary, secondary, orientation, gtype)
+                    assert bst == bst
+                    assert bst != cifti.BrainStructure('Thalamus', secondary, orientation, gtype)
+                    if secondary is None:
+                        assert bst == cifti.BrainStructure(primary, 'midplane', orientation, gtype)
+                    else:
+                        assert bst != cifti.BrainStructure(primary, 'midplane', orientation, gtype)
+                    if (gtype == 'volume' and primary == 'cortex') or (gtype == 'surface' and primary != 'cortex'):
+                        assert cifti.BrainStructure.from_string(bst.cifti) != bst
+                    else:
+                        assert cifti.BrainStructure.from_string(bst.cifti) == bst
+                    assert cifti.BrainStructure.from_string(bst.cifti).secondary is None