diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4d5f30ba05f4ee8bebab99972fb925a0556519ce..15d563eeb0021411574dd267935b0310c1aedfee 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,23 @@ This document contains the ``fslpy`` release history in reverse chronological order. +1.10.1 (Friday August 3rd 2018) +------------------------------- + + +Changed +^^^^^^^ + + +* Minor adjustments to improve Windows compatibility. + + +Fixed +^^^^^ + +* The :meth:`.FEATImage.getCOPE` method was returning PE images. + + 1.10.0 (Wednesday July 18th 2018) --------------------------------- diff --git a/fsl/data/featanalysis.py b/fsl/data/featanalysis.py index 39e40c9122c163d68adbfe0d308bd9146ef37306..246722b576e47abb5acf9de89cc714c952e1d421 100644 --- a/fsl/data/featanalysis.py +++ b/fsl/data/featanalysis.py @@ -357,6 +357,9 @@ def loadClusterResults(featdir, settings, contrast): clusterFile = op.join( featdir, 'cluster_zstat{}_std.txt'.format(contrast + 1)) + if not op.exists(clusterFile): + return None + # In higher levle analysis run in some standard # space, the cluster coordinates are in standard # space. We transform them to voxel coordinates. @@ -365,9 +368,6 @@ def loadClusterResults(featdir, settings, contrast): getDataFile(featdir), loadData=False).worldToVoxMat - if not op.exists(clusterFile): - return None - log.debug('Loading cluster results for contrast {} from {}'.format( contrast, clusterFile)) @@ -427,48 +427,49 @@ def loadClusterResults(featdir, settings, contrast): # whitespace, and discarding # empty lines lines = f.readlines() - lines = [l.strip() for l in lines] - lines = [l for l in lines if l != ''] - - # the first line should contain column - # names, and each other line should - # contain the data for one cluster - colNames = lines[0] - clusterLines = lines[1:] - - # each line should be tab-separated - colNames = colNames.split('\t') - clusterLines = [cl .split('\t') for cl in clusterLines] - - # Turn each cluster line into a - # Cluster instance. An error will - # be raised if the columm names - # are unrecognised (i.e. not in - # the colmap above), or if the - # file is poorly formed. - clusters = [Cluster(**dict(zip(colNames, cl))) for cl in clusterLines] - - # Make sure all coordinates are in voxels - - # for first level analyses, the coordXform - # will be an identity transform (the coords - # are already in voxels). But for higher - # level, the coords are in mm, and need to - # be transformed to voxels. - for c in clusters: - - zmax = [c.zmaxx, c.zmaxy, c.zmaxz] - zcog = [c.zcogx, c.zcogy, c.zcogz] - copemax = [c.copemaxx, c.copemaxy, c.copemaxz] - - zmax = transform.transform([zmax], coordXform)[0].round() - zcog = transform.transform([zcog], coordXform)[0].round() - copemax = transform.transform([copemax], coordXform)[0].round() - - c.zmaxx, c.zmaxy, c.zmaxz = zmax - c.zcogx, c.zcogy, c.zcogz = zcog - c.copemax, c.copemaxy, c.copemaxz = copemax - - return clusters + + lines = [line.strip() for line in lines] + lines = [line for line in lines if line != ''] + + # the first line should contain column + # names, and each other line should + # contain the data for one cluster + colNames = lines[0] + clusterLines = lines[1:] + + # each line should be tab-separated + colNames = colNames.split('\t') + clusterLines = [cl .split('\t') for cl in clusterLines] + + # Turn each cluster line into a + # Cluster instance. An error will + # be raised if the column names + # are unrecognised (i.e. not in + # the colmap above), or if the + # file is poorly formed. 
+ clusters = [Cluster(**dict(zip(colNames, cl))) for cl in clusterLines] + + # Make sure all coordinates are in voxels - + # for first level analyses, the coordXform + # will be an identity transform (the coords + # are already in voxels). But for higher + # level, the coords are in mm, and need to + # be transformed to voxels. + for c in clusters: + + zmax = [c.zmaxx, c.zmaxy, c.zmaxz] + zcog = [c.zcogx, c.zcogy, c.zcogz] + copemax = [c.copemaxx, c.copemaxy, c.copemaxz] + + zmax = transform.transform([zmax], coordXform)[0].round() + zcog = transform.transform([zcog], coordXform)[0].round() + copemax = transform.transform([copemax], coordXform)[0].round() + + c.zmaxx, c.zmaxy, c.zmaxz = zmax + c.zcogx, c.zcogy, c.zcogz = zcog + c.copemaxx, c.copemaxy, c.copemaxz = copemax + + return clusters def getDataFile(featdir): diff --git a/fsl/data/featdesign.py b/fsl/data/featdesign.py index e28f038c6bb3dfb864689aff8eb99f5e15a5fdac..68e7e398ed6d066c506f0365482e07daeb1d21f9 100644 --- a/fsl/data/featdesign.py +++ b/fsl/data/featdesign.py @@ -169,30 +169,15 @@ class FEATFSFDesign(object): if level == 1: getEVs = getFirstLevelEVs else: getEVs = getHigherLevelEVs - self.__settings = collections.OrderedDict(settings.items()) - self.__design = np.array(designMatrix) - self.__numEVs = self.__design.shape[1] - self.__evs = getEVs(featDir, self.__settings, self.__design) + self.__loadVoxEVs = loadVoxelwiseEVs + self.__settings = collections.OrderedDict(settings.items()) + self.__design = np.array(designMatrix) + self.__numEVs = self.__design.shape[1] + self.__evs = getEVs(featDir, self.__settings, self.__design) if len(self.__evs) != self.__numEVs: raise FSFError('Number of EVs does not match design.mat') - # Load the voxelwise images now, - # so they're ready to be used by - # the getDesign method. - for ev in self.__evs: - - if not isinstance(ev, (VoxelwiseEV, VoxelwiseConfoundEV)): - continue - - ev.image = None - - # The path to some voxelwise - # EVs may not be present - - # see the VoxelwisEV class. - if loadVoxelwiseEVs and (ev.filename is not None): - ev.image = fslimage.Image(ev.filename) - def getEVs(self): """Returns a list containing the :class:`EV` instances that represent @@ -224,7 +209,7 @@ class FEATFSFDesign(object): if not isinstance(ev, (VoxelwiseEV, VoxelwiseConfoundEV)): continue - if ev.image is None: + if (not self.__loadVoxEVs) or (ev.filename is None): log.warning('Voxel EV image missing ' 'for ev {}'.format(ev.index)) continue @@ -300,14 +285,15 @@ class VoxelwiseEV(NormalEV): ============ ====================================================== ``filename`` Path to the image file containing the data for this EV + ``image`` Reference to the :class:`.Image` object ============ ====================================================== .. note:: The file for voxelwise EVs in a higher level analysis are not copied into the FEAT directory, so if the user has removed them, or moved the .gfeat directory, the file path here will not be valid. Therefore, a ``VoxelwiseEV`` will test to see if the - file exists, and will set the ``filename`` attribute to ``None`` - it it does not exist. + file exists, and will set the ``filename`` and ``image`` + attributes to ``None`` if it does not exist. """ def __init__(self, realIdx, origIdx, title, filename): @@ -330,6 +316,27 @@ class VoxelwiseEV(NormalEV): 'exist: {}'.format(filename)) self.filename = None + self.__image = None + + + def __del__(self): + """Clears any reference to the voxelwise EV image. 
""" + self.__image = None + + + @property + def image(self): + """Returns the :class:`.Image` containing the voxelwise EV data. """ + + if self.filename is None: + return None + + if self.__image is not None: + return self.__image + + self.__image = fslimage.Image(self.filename, mmap=False) + return self.__image + class ConfoundEV(EV): """Class representing a confound EV. diff --git a/fsl/data/featimage.py b/fsl/data/featimage.py index 0b37540ba55b98c6c1bf8260e01e2b279701642b..80aad19893ae4c504d2a0828e003c67134588fb0 100644 --- a/fsl/data/featimage.py +++ b/fsl/data/featimage.py @@ -95,6 +95,16 @@ class FEATImage(fslimage.Image): self.name = '{}: {}'.format(self.__analysisName, self.name) + def __del__(self): + """Clears references to any loaded images.""" + self.__design = None + self.__residuals = None + self.__pes = None + self.__copes = None + self.__zstats = None + self.__clustMasks = None + + def getFEATDir(self): """Returns the FEAT directory this image is contained in.""" return self.__featDir @@ -245,14 +255,13 @@ class FEATImage(fslimage.Image): """Returns the COPE image for the given contrast (0-indexed). """ if self.__copes[con] is None: - copefile = featanalysis.getPEFile(self.__featDir, con) + copefile = featanalysis.getCOPEFile(self.__featDir, con) self.__copes[con] = fslimage.Image( copefile, name='{}: COPE{} ({})'.format( self.__analysisName, con + 1, self.contrastNames()[con])) - return self.__copes[con] diff --git a/fsl/data/image.py b/fsl/data/image.py index 274273f2657e8532d84c1b9fab36afa5a2d75384..0a130dcaad1169a847f5900fb27ac8f7081ee065 100644 --- a/fsl/data/image.py +++ b/fsl/data/image.py @@ -986,11 +986,13 @@ class Image(Nifti): def __del__(self): """Closes any open file handles, and clears some references. """ + self.header = None self.__nibImage = None self.__imageWrapper = None if getattr(self, '__fileobj', None) is not None: self.__fileobj.close() + self.__fileobj = None def getImageWrapper(self): diff --git a/fsl/utils/imcp.py b/fsl/utils/imcp.py index 047f1f2f2ca65dad5d0b2c31a0b706da14ee4577..8950e074d00cb95e7d02c6b2ec778bdb1b2135e6 100644 --- a/fsl/utils/imcp.py +++ b/fsl/utils/imcp.py @@ -137,6 +137,7 @@ def imcp(src, img = nib.load(src) nib.save(img, dest) + img = None if move: # if input is an image pair, we diff --git a/fsl/utils/path.py b/fsl/utils/path.py index fc8efeadb4426a245a72ffe60a63cb6264784d8d..4190865a820dab8b52b8119ed305c0d17006a721 100644 --- a/fsl/utils/path.py +++ b/fsl/utils/path.py @@ -65,7 +65,7 @@ def shallowest(path, suffixes): path = path.strip() # We've reached the root of the file system - if path == op.sep or path == '': + if path == op.sep or path == '' or op.splitdrive(path)[1] == '': return None path = path.rstrip(op.sep) diff --git a/fsl/utils/run.py b/fsl/utils/run.py index 4c45f6aa8de215331903f84d5ec372d6b51d8283..a5b9df816f9605edf62b8daa62d082142a898057 100644 --- a/fsl/utils/run.py +++ b/fsl/utils/run.py @@ -6,6 +6,9 @@ # """This module provides some functions for running shell commands. +.. note:: The functions in this module are only known to work in Unix-like + environments. + .. 
autosummary:: :nosignatures: diff --git a/fsl/wrappers/wrapperutils.py b/fsl/wrappers/wrapperutils.py index 2ef47df62963497f0774e0562dd815880151d040..6728a23272696f295227291d723101cd366e7be5 100644 --- a/fsl/wrappers/wrapperutils.py +++ b/fsl/wrappers/wrapperutils.py @@ -91,7 +91,6 @@ import os import re import sys import glob -import shutil import random import string import fnmatch @@ -864,6 +863,7 @@ class _FileOrThing(object): fval = self.__load(fullpath) if fval is not None: prefixed = self.__removeExt(prefixed) + prefPat = prefPat.replace('\\', '\\\\') prefixed = re.sub('^' + prefPat, prefName, prefixed) result[prefixed] = fval break @@ -915,7 +915,7 @@ def fileOrImage(*args, **kwargs): # create an independent in-memory # copy of the image file - img = nib.load(path) + img = nib.load(path, mmap=False) # if any arguments were fsl images, # that takes precedence. diff --git a/tests/__init__.py b/tests/__init__.py index eb16fa95ab31bdcce6494524b28a481493cf7cb1..4960e909532e5d5f1a970fd9f44368ed23c7d31e 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -294,7 +294,8 @@ def make_mock_feat_analysis(featdir, if indata: filtfunc = op.join(featdir, 'filtered_func_data.nii.gz') - make_random_image(filtfunc, shape4D, xform) + img = make_random_image(filtfunc, shape4D, xform) + del img # and some dummy voxelwise EV files if voxEVs: @@ -311,7 +312,10 @@ def make_mock_feat_analysis(featdir, data = data.reshape(list(shape) + [1]).repeat(timepoints, axis=3) data[..., :] += range(i, i + timepoints) - nib.save(nib.nifti1.Nifti1Image(data, xform), vf) + img = nib.nifti1.Nifti1Image(data, xform) + + nib.save(img, vf) + del img otherFiles = [] otherShapes = [] @@ -342,7 +346,8 @@ def make_mock_feat_analysis(featdir, otherShapes.extend([shape] * len(files)) for f, s in zip(otherFiles, otherShapes): - make_random_image(f, s, xform) + img = make_random_image(f, s, xform) + del img return featdir diff --git a/tests/conftest.py b/tests/conftest.py index d3482bba1cd5cd4242c2e4bd322ab9441d15f5f6..9e87030c00303f83b50c347c7195c311ddc86dd3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -41,7 +41,7 @@ def seed(request): seed = request.config.getoption('--seed') if seed is None: - seed = np.random.randint(2 ** 32) + seed = np.random.randint(2 ** 30) np.random.seed(seed) random .seed(seed) diff --git a/tests/test_dtifit.py b/tests/test_dtifit.py index a80b92ecea736690131df369f1fa5d5c80965ba9..9ae62a80a58d456d0b971ba1a728bd22eb5b123e 100644 --- a/tests/test_dtifit.py +++ b/tests/test_dtifit.py @@ -21,19 +21,19 @@ def test_getDTIFitDataPrefix_and_isDTIFitPath(): for s in suffixes: path = op.join(dir, '{}{}'.format(prefix, s)) with open(path, 'wt') as f: - f.write(path) + f.write(path) prefixes = ['dti', 'blob', 'random-prefix', '01234'] suffixes = ['_V1.nii', '_V2.nii', '_V3.nii', '_L1.nii', '_L2.nii', '_L3.nii'] badSuffixes = ['_V1.txt', '_V2.nii', '_V3.nii', - '_L1.txt', '_L2.tar', '_L3.nii'] + '_L1.txt', '_L2.tar', '_L3.nii'] # Valid dtifit directories with tests.testdir() as testdir: for p in prefixes: - + tests.cleardir(testdir) make_dtifit_dir(testdir, p, suffixes) assert dtifit.getDTIFitDataPrefix(testdir) == p @@ -82,6 +82,7 @@ def test_looksLikeTensorImage(): img = fslimage.Image(fname) assert dtifit.looksLikeTensorImage(img) == expected + img = None def test_decomposeTensorMatrix(): @@ -102,7 +103,7 @@ def test_decomposeTensorMatrix(): [[ 0.701921939849854, -0.711941838264465, 0.021080270409584], [-0.700381875038147, -0.695301055908203, -0.16131255030632 ], [-0.129502296447754, 
-0.098464585840702, 0.986678183078766]], - [[-0.993700802326202, -0.104962401092052, -0.039262764155865], + [[-0.993700802326202, -0.104962401092052, -0.039262764155865], [-0.081384353339672, 0.916762292385101, -0.391054302453995], [-0.077040620148182, 0.385395616292953, 0.919529736042023]], [[ 0.068294189870358, -0.666985750198364, 0.741933941841125], @@ -112,7 +113,7 @@ def test_decomposeTensorMatrix(): tensorMatrices = tensorMatrices.reshape(1, 1, 3, 6) expEigVals = expEigVals .reshape(1, 1, 3, 3) expEigVecs = expEigVecs .reshape(1, 1, 3, 3, 3) - + v1, v2, v3, l1, l2, l3 = dtifit.decomposeTensorMatrix(tensorMatrices) expV1 = expEigVecs[:, :, :, 0] @@ -129,7 +130,7 @@ def test_decomposeTensorMatrix(): # Vector signs are arbitrary for vox in range(3): for resvec, expvec in zip([v1, v2, v3], [expV1, expV2, expV3]): - + resvec = resvec[:, :, vox] expvec = expvec[:, :, vox] @@ -172,3 +173,7 @@ def test_DTIFitTensor(): assert np.all(np.isclose(dtiobj.voxToWorldMat, v1.voxToWorldMat)) assert np.all(np.isclose(dtiobj.shape[:3], v1.shape[:3])) assert np.all(np.isclose(dtiobj.pixdim[:3], v1.pixdim[:3])) + + del v1 + del dtiobj + v1 = None diff --git a/tests/test_ensure.py b/tests/test_ensure.py index 865c205bb1c2396e41072c674ea7f688871f5de5..961377d39b841b52cb6d804deff119021edd526c 100644 --- a/tests/test_ensure.py +++ b/tests/test_ensure.py @@ -28,3 +28,7 @@ def test_ensureIsImage(): for l in loaded: assert isinstance(l, nib.nifti1.Nifti1Image) assert np.all(img.get_data() == l.get_data()) + + l = None + loaded = None + img = None diff --git a/tests/test_featdesign.py b/tests/test_featdesign.py index f685ff712364c6338faaade5af3cea151ccc87a5..b10aa23091954d0dd4b840d0c11132d3427bd8ab 100644 --- a/tests/test_featdesign.py +++ b/tests/test_featdesign.py @@ -103,17 +103,12 @@ with the following commands: """ -import os -import os.path as op -import itertools as it -import glob -import shutil -import numpy as np +import os.path as op +import numpy as np import pytest import tests -import fsl.data.image as fslimage import fsl.data.featdesign as featdesign import fsl.data.featanalysis as featanalysis @@ -172,6 +167,9 @@ def test_FEATFSFDesign(): assert des.getDesign().shape == desshape assert des.getDesign(rvox).shape == desshape + del des + des = None + def test_FEATFSFDesign_firstLevelVoxelwiseEV(seed): @@ -214,6 +212,8 @@ def test_FEATFSFDesign_firstLevelVoxelwiseEV(seed): for i, evidx in enumerate(voxevIdxs): expect = np.arange(i, i + 45) + offset assert np.all(np.isclose(matrix[:, evidx], expect)) + del design + design = None def test_getFirstLevelEVs_1(): @@ -240,6 +240,8 @@ def test_getFirstLevelEVs_1(): assert isinstance(evs[i], evtype) for k, v in atts.items(): assert getattr(evs[i], k) == v + del evs + evs = None def test_getFirstLevelEVs_2(): @@ -267,6 +269,8 @@ def test_getFirstLevelEVs_2(): assert isinstance(evs[i], evtype) for k, v in atts.items(): assert getattr(evs[i], k) == v + del evs + evs = None def test_getFirstLevelEVs_3(): @@ -307,7 +311,6 @@ def test_getFirstLevelEVs_3(): (featdesign.ConfoundEV, {'index' : 30, 'confIndex' : 0}), (featdesign.ConfoundEV, {'index' : 31, 'confIndex' : 1})] - evs = featdesign.getFirstLevelEVs(featdir, settings, matrix) assert len(evs) == 32 @@ -318,6 +321,10 @@ def test_getFirstLevelEVs_3(): for k, v in atts.items(): assert getattr(evs[i], k) == v + del evs + evs = None + + def test_getFirstLevelEVs_realdata(): featdir = op.join(datadir, '1stlevel_realdata.feat') settings = featanalysis.loadSettings(featdir) @@ -336,7 +343,8 @@ def 
test_getFirstLevelEVs_realdata(): assert isinstance(evs[i], evtype) for k, v in atts.items(): assert getattr(evs[i], k) == v - + del evs + evs = None def test_getHigherLevelEVs_1(): @@ -351,8 +359,8 @@ def test_getHigherLevelEVs_1(): assert isinstance(evs[0], featdesign.NormalEV) assert evs[0].index == 0 assert evs[0].origIndex == 0 - - + del evs + evs = None def test_getHigherLevelEVs_2(): @@ -368,7 +376,8 @@ def test_getHigherLevelEVs_2(): assert evs[0].index == 0 assert evs[0].origIndex == 0 assert isinstance(evs[1], featdesign.VoxelwiseEV) - + del evs + evs = None def test_loadDesignMat(): diff --git a/tests/test_featimage.py b/tests/test_featimage.py index ed75381c4d29a44676daf67060b3448ac199990b..fb33c6e4ddb13b43dbc4d1883faddf227489b305 100644 --- a/tests/test_featimage.py +++ b/tests/test_featimage.py @@ -29,6 +29,7 @@ featdirs = ['1stlevel_1.feat', '1stlevel_2.feat', '1stlevel_2.feat', '2ndlevel_2.gfeat/cope1.feat', '2ndlevel_2.gfeat/cope2.feat', '2ndlevel_realdata.gfeat/cope1.feat', '2ndlevel_realdata.gfeat/cope2.feat'] +featdirs = [op.join(*d.split('/')) for d in featdirs] shapes = [(64, 64, 5, 45), (64, 64, 5, 45), (64, 64, 5, 45), @@ -90,7 +91,7 @@ def test_FEATImage_attributes(): clustMasks=False) else: featdir = op.join(datadir, featdir) - + # Now create a FEATImage. We validate its # attributes against the values returned by # the functions in featdesign/featanalysis. @@ -125,12 +126,16 @@ def test_FEATImage_attributes(): expect = featanalysis.loadClusterResults(featdir, settings, ci) assert len(result) == len(expect) assert all([rc.nvoxels == ec.nvoxels for rc, ec in zip(result, expect)]) + del design + del fi + fi = None + def test_FEATImage_imageAccessors(): for featdir in TEST_ANALYSES.keys(): - + shape = TEST_ANALYSES[featdir]['shape'] xform = TEST_ANALYSES[featdir]['xform'] @@ -152,25 +157,26 @@ def test_FEATImage_imageAccessors(): nevs = fi.numEVs() ncons = fi.numContrasts() - # Testing the FEATImage intenral cache + # Testing the FEATImage internal cache for i in range(2): assert fi.getResiduals().shape == shape4D - for ev in range(nevs): assert fi.getPE(ev).shape == shape for con in range(ncons): assert fi.getCOPE( con).shape == shape assert fi.getZStats( con).shape == shape assert fi.getClusterMask(con).shape == shape - + del fi + fi = None + def test_FEATImage_nostats(): - + featdir = op.join(datadir, '1stlevel_nostats.feat') shape = (4, 4, 5, 45) with tests.testdir() as testdir: - + featdir = tests.make_mock_feat_analysis(featdir, testdir, shape) fi = featimage.FEATImage(featdir) @@ -181,13 +187,15 @@ def test_FEATImage_nostats(): with pytest.raises(Exception): fi.fit([1, 2, 3], (2, 2, 2)) - + with pytest.raises(Exception): - fi.partialFit([1, 2, 3], (2, 2, 2)) - + fi.partialFit([1, 2, 3], (2, 2, 2)) + del fi + fi = None + def test_FEATImage_fit_firstLevel(): - + featdir = op.join(datadir, '1stlevel_realdata.feat') fi = featimage.FEATImage(featdir) expect = np.array([ @@ -202,8 +210,8 @@ def test_FEATImage_fit_firstLevel(): 10287.91883737, 10325.38456267, 10341.92299781, 10347.17916861, 10348.58339616, 10348.89634025, 10348.93522057, 10345.25397481, 10288.9236822 , 10315.64160242, 10449.39567496, 10558.66999883, - 10597.64918744]) - + 10597.64918744]) + # bad contrast with pytest.raises(Exception): fi.fit([1, 2, 3, 4, 5, 6, 7], (2, 2, 2)) @@ -214,20 +222,24 @@ def test_FEATImage_fit_firstLevel(): result = fi.fit([1, 1, 1, 1], (2, 2, 2)) assert np.all(np.isclose(result, expect)) + del fi + fi = None def test_FEATImage_fit_higherLevel(): - + featdir = 
op.join(datadir, '2ndlevel_realdata.gfeat/cope1.feat') fi = featimage.FEATImage(featdir) expect = np.array([86.37929535, 86.37929535, 86.37929535]) result = fi.fit([1], (5, 5, 5)) - + assert np.all(np.isclose(result, expect)) + del fi + fi = None def test_FEATImage_partialFit(): - + featdir = op.join(datadir, '1stlevel_realdata.feat') fi = featimage.FEATImage(featdir) expect = np.array([ @@ -244,14 +256,16 @@ def test_FEATImage_partialFit(): 10203.21032619, 10136.1942605 , 10128.23728873, 10416.78984136, 10118.51262128]) result = fi.partialFit([1, 1, 1, 1], (2, 2, 2)) - + assert np.all(np.isclose(result, expect)) + del fi + fi = None def test_modelFit(seed): for i in range(500): - + # 2 evs, 20 timepoints # First EV is a boxcar, # second is a random regressor @@ -264,12 +278,12 @@ def test_modelFit(seed): design[:, ev] = design[:, ev] - design[:, ev].mean() # Generate some random PEs, and - # generate the data that would + # generate the data that would # have resulted in them pes = np.random.random(2) expect = np.dot(design, pes) contrast = [1] * design.shape[1] - + result1 = featimage.modelFit(expect, design, contrast, pes, True) result2 = featimage.modelFit(expect, design, contrast, pes, False) diff --git a/tests/test_freesurfer.py b/tests/test_freesurfer.py index b0f9bb638bc25758fbd46d6e6683e3a4535e876c..36fc71cabbcb625ce0ff8e3268851d80ef83774e 100644 --- a/tests/test_freesurfer.py +++ b/tests/test_freesurfer.py @@ -167,10 +167,9 @@ def test_loadVertexData_mgh(): mesh = fslfs.FreesurferMesh('lh.pial') assert np.all(np.isclose(mesh.loadVertexData('lh.vdata.mgh'), data.reshape(-1, 1))) - - - - + del img + del mesh + img = None def test_loadVertexData_annot(): diff --git a/tests/test_fsl_utils_path.py b/tests/test_fsl_utils_path.py index 6bb54fcd8b55ab2b78225b5d2d2d26a9e1e8ac2a..0820f4ed642c9499047b6a02f811027d52522d15 100644 --- a/tests/test_fsl_utils_path.py +++ b/tests/test_fsl_utils_path.py @@ -75,6 +75,10 @@ def test_deepest(): ] for path, suffixes, output in tests: + + path = op.join(*path.split('/')) + if output is not None: + output = op.join(*output.split('/')) assert fslpath.deepest(path, suffixes) == output @@ -107,6 +111,11 @@ def test_shallowest(): ] for path, suffixes, output in tests: + + path = op.join(*path.split('/')) + if output is not None: + output = op.join(*output.split('/')) + assert fslpath.shallowest(path, suffixes) == output @@ -120,6 +129,8 @@ def test_allFiles(): 'a/b/d/1', ] + create = [op.join(*c.split('/')) for c in create] + with testdir(create) as td: assert (sorted(fslpath.allFiles('.')) == sorted([op.join('.', c) for c in create])) diff --git a/tests/test_idle.py b/tests/test_idle.py index 7959084bd7ec0782b3df45216345a174019feb59..afa45b5ad3e1236b2d4fee4644b7f8f4659c1a03 100644 --- a/tests/test_idle.py +++ b/tests/test_idle.py @@ -622,5 +622,5 @@ def test_mutex(): # Either t1 has to start and # finish before t2 or vice versa - assert (t[0].method2start > t[0].method1end or - t[0].method1start > t[0].method2end) + assert (t[0].method2start >= t[0].method1end or + t[0].method1start >= t[0].method2end) diff --git a/tests/test_image.py b/tests/test_image.py index 47a6ad4ce74461b88c5589af4c86221a26b16783..da6263912057ddd902624a89c6999d2bd805760c 100644 --- a/tests/test_image.py +++ b/tests/test_image.py @@ -314,6 +314,7 @@ def _test_Image_atts(imgtype): allowedExts=allowedExts, mustExist=True, fileGroups=fileGroups) + i = None def test_Image_atts2_analyze(): _test_Image_atts2(0) @@ -530,7 +531,7 @@ def _test_Image_orientation(imgtype, voxorient): 
make_image(imagefile, imgtype, (10, 10, 10), pixdims, np.float32) - image = fslimage.Image(imagefile) + image = fslimage.Image(imagefile, mmap=False) # analyze images are always assumed to be # stored in radiological (LAS) orientation @@ -566,6 +567,7 @@ def _test_Image_orientation(imgtype, voxorient): assert image.getOrientation(0, affine) == expectvox0Orientation assert image.getOrientation(1, affine) == expectvox1Orientation assert image.getOrientation(2, affine) == expectvox2Orientation + image = None def test_Image_sqforms_nifti1_normal(): _test_Image_sqforms(1, 1, 1) @@ -693,6 +695,8 @@ def _test_Image_changeXform(imgtype, sformcode=None, qformcode=None): # ANALYZE affine is not editable with pytest.raises(Exception): img.voxToWorldMat = newXform + del img + del image return img.voxToWorldMat = newXform @@ -709,6 +713,9 @@ def _test_Image_changeXform(imgtype, sformcode=None, qformcode=None): assert np.all(np.isclose(img.worldToVoxMat, invx)) assert img.getXFormCode('sform') == expSformCode assert img.getXFormCode('qform') == expQformCode + del img + del image + image = None def test_Image_changeData_analyze(seed): _test_Image_changeData(0) @@ -724,14 +731,15 @@ def _test_Image_changeData(imgtype): make_image(imagefile, imgtype) - img = fslimage.Image(imagefile) + img = fslimage.Image(imagefile, mmap=False) + shape = img.shape notified = {} def randvox(): - return (np.random.randint(0, img.shape[0]), - np.random.randint(0, img.shape[1]), - np.random.randint(0, img.shape[2])) + return (np.random.randint(0, shape[0]), + np.random.randint(0, shape[1]), + np.random.randint(0, shape[2])) def onData(*a): notified['data'] = True @@ -806,6 +814,10 @@ def _test_Image_changeData(imgtype): assert notified.get('dataRange', False) assert np.isclose(img[maxx, maxy, maxz], newdmax) assert np.all(np.isclose(img.dataRange, (newdmin, newdmax))) + img.deregister('name1', 'data') + img.deregister('name2', 'data') + img.deregister('name3', 'data') + img = None def test_Image_2D_analyze(): _test_Image_2D(0) @@ -851,6 +863,7 @@ def _test_Image_2D(imgtype): assert tuple(map(float, shape)) == tuple(map(float, image .shape)) assert tuple(map(float, shape)) == tuple(map(float, image[:].shape)) assert tuple(map(float, pixdim)) == tuple(map(float, image .pixdim)) + image = None def test_Image_5D_analyze(): _test_Image_5D(0) @@ -880,6 +893,8 @@ def _test_Image_5D(imgtype): assert img.shape == dims assert img.ndim == 5 assert img.data.shape == dims + del img + img = None def test_Image_voxToScaledVox_analyze(): _test_Image_voxToScaledVox(0) @@ -917,6 +932,7 @@ def _test_Image_voxToScaledVox(imgtype): assert np.all(np.isclose(expected, img.voxToScaledVoxMat)) assert np.all(np.isclose(invexpected, img.scaledVoxToVoxMat)) + img = None def test_Image_sameSpace(): @@ -1022,6 +1038,7 @@ def _test_Image_save(imgtype): for (x, y, z), v in zip(randvoxes, randvals): assert np.isclose(img[x, y, z], v) + img2 = None def test_image_resample(seed): @@ -1035,7 +1052,7 @@ def test_image_resample(seed): shape = np.random.randint(5, 100, 3) make_random_image(fname, shape) - img = fslimage.Image(fname) + img = fslimage.Image(fname, mmap=False) # resampling to the same shape should be a no-op samei, samex = img.resample(shape) @@ -1049,7 +1066,8 @@ def test_image_resample(seed): resampled, xf = img.resample(rshape, order=0) img.save('base.nii.gz') - fslimage.Image(resampled, xform=xf).save('res.nii.gz') + fslimage.Image(resampled, xform=xf, + mmap=False).save('res.nii.gz') assert tuple(resampled.shape) == tuple(rshape) @@ -1102,6 
+1120,8 @@ def test_image_resample(seed): resampled = img.resample((15, 15, 15), slc)[0] assert tuple(resampled.shape) == (15, 15, 15) + del img + img = None def test_Image_init_xform_nifti1(): _test_Image_init_xform(1) @@ -1142,7 +1162,6 @@ def _test_Image_init_xform(imgtype): assert fsform_code == sform_code assert fqform_code == qform_code - # an image created off # an xform only should # get its sform set @@ -1183,3 +1202,7 @@ def _test_Image_init_xform(imgtype): assert np.all(np.isclose(xform, rxform)) assert fsform_code == sform_code assert fqform_code == qform_code + + del fimg + del img + img = None diff --git a/tests/test_immv_imcp.py b/tests/test_immv_imcp.py index 92a30353ccda1a5d6c76a19aadcac947fea9c84b..36e9ddae17d2ad99269420606f857c496b22bb40 100644 --- a/tests/test_immv_imcp.py +++ b/tests/test_immv_imcp.py @@ -347,8 +347,10 @@ def test_imcp_script_shouldPass(move=False): ' '.join(infiles) for inf in infiles: - img = nib.load(op.join(tindir, inf)) + img = nib.load(op.join(tindir, inf), + mmap=False) imghash = hash(img.get_data().tobytes()) + img = None imageHashes.append(imghash) print('adj files_to_expect: ', files_to_expect) @@ -357,7 +359,10 @@ def test_imcp_script_shouldPass(move=False): imcp_args[:-1] = [op.join(tindir, a) for a in imcp_args[:-1]] imcp_args[ -1] = op.join(toutdir, imcp_args[-1]) - imcp_args = [op.relpath(a, reldir) for a in imcp_args] + + for i, a in enumerate(imcp_args): + if op.splitdrive(a)[0] == op.splitdrive(reldir)[0]: + imcp_args[i] = op.relpath(a, reldir) print('indir before: ', os.listdir(tindir)) print('outdir before: ', os.listdir(toutdir)) @@ -375,8 +380,6 @@ def test_imcp_script_shouldPass(move=False): # too hard if indir == outdir if move and tindir != toutdir: - real_print('indir: ', tindir) - real_print('outdir: ', toutdir) infiles = os.listdir(tindir) infiles = [f for f in infiles if op.isfile(f)] infiles = [f for f in infiles if op.isfile(f)] diff --git a/tests/test_melodicanalysis.py b/tests/test_melodicanalysis.py index 54edfd7ebcfeffebf60779edcd2bebde26d697bb..907f3c3ba0c892d17206acb156aab830acefb283 100644 --- a/tests/test_melodicanalysis.py +++ b/tests/test_melodicanalysis.py @@ -141,6 +141,9 @@ def test_getDataFile(): ] for paths, meldir, expected in testcases: + paths = [op.join(*p.split('/')) for p in paths] + if expected is not None: + expected = op.join(*expected.split('/')) with tests.testdir(paths) as testdir: assert mela.getDataFile(meldir) == expected @@ -151,9 +154,11 @@ def test_getMeanFile(): 'analysis.ica/melodic_FTmix', 'analysis.ica/mean.nii.gz'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') - expected = op.join(testdir, 'analysis.ica/mean.nii.gz') + expected = op.join(testdir, 'analysis.ica', 'mean.nii.gz') assert mela.getMeanFile(meldir) == expected @@ -162,6 +167,8 @@ def test_getMeanFile(): 'analysis.ica/melodic_FTmix', 'analysis.ica/mean.txt'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') with pytest.raises(fslpath.PathError): @@ -173,15 +180,19 @@ def test_getICFile(): 'analysis.ica/melodic_mix', 'analysis.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') - expected = op.join(testdir, 'analysis.ica/melodic_IC.nii.gz') + expected = op.join(testdir, 'analysis.ica', 'melodic_IC.nii.gz') assert mela.getICFile(meldir) == expected paths = 
['analysis.ica/melodic_IC.txt', 'analysis.ica/melodic_mix', 'analysis.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') with pytest.raises(fslpath.PathError): @@ -193,13 +204,17 @@ def test_getMixFile(): 'analysis.ica/melodic_mix', 'analysis.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') - expected = op.join(testdir, 'analysis.ica/melodic_mix') + expected = op.join(testdir, 'analysis.ica', 'melodic_mix') assert mela.getMixFile(meldir) == expected paths = ['analysis.ica/melodic_IC.ni.gz', 'analysis.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') assert mela.getMixFile(meldir) is None @@ -208,14 +223,17 @@ def test_getFTMixFile(): paths = ['analysis.ica/melodic_IC.nii.gz', 'analysis.ica/melodic_mix', 'analysis.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') - expected = op.join(testdir, 'analysis.ica/melodic_FTmix') + expected = op.join(testdir, 'analysis.ica', 'melodic_FTmix') assert mela.getFTMixFile(meldir) == expected paths = ['analysis.ica/melodic_IC.ni.gz', 'analysis.ica/melodic_mix'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') assert mela.getFTMixFile(meldir) is None @@ -225,15 +243,18 @@ def test_getReportFile(): 'analysis.ica/filtfunc.ica/melodic_mix', 'analysis.ica/filtfunc.ica/melodic_FTmix', 'analysis.ica/report.html'] + paths = [op.join(*p.split('/')) for p in paths] with tests.testdir(paths) as testdir: - meldir = op.join(testdir, 'analysis.ica/filtfunc.ica') - expected = op.join(testdir, 'analysis.ica/report.html') + meldir = op.join(testdir, 'analysis.ica', 'filtfunc.ica') + expected = op.join(testdir, 'analysis.ica', 'report.html') assert op.abspath(mela.getReportFile(meldir)) == expected paths = ['analysis.ica/filtfunc.ica/melodic_IC.ni.gz', 'analysis.ica/filtfunc.ica/melodic_mix', 'analysis.ica/filtfunc.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] + with tests.testdir(paths) as testdir: meldir = op.join(testdir, 'analysis.ica') assert mela.getReportFile(meldir) is None diff --git a/tests/test_melodicimage.py b/tests/test_melodicimage.py index 5536e418b876ed3f584bb8d84a1306e2af805564..c3f172602fdca3f0013577d834425ca46e87b5d4 100644 --- a/tests/test_melodicimage.py +++ b/tests/test_melodicimage.py @@ -98,12 +98,12 @@ def test_MelodicImage_create(): paths = ['analysis.ica/melodic_IC.nii.gz', 'analysis.ica/melodic_mix', 'analysis.ica/melodic_FTmix'] + paths = [op.join(*p.split('/')) for p in paths] with tests.testdir(paths) as testdir: - path = op.join(testdir, 'analysis.ica/melodic_IC.nii.gz') + path = op.join(testdir, 'analysis.ica', 'melodic_IC.nii.gz') with pytest.raises(Exception): meli.MelodicImage(path) - for ic_prefix in ['melodic_IC', 'melodic_oIC']: with tests.testdir() as testdir: @@ -114,9 +114,10 @@ def test_MelodicImage_create(): # Should be able to specify the # melodic dir, or the IC image - meli.MelodicImage(meldir) - meli.MelodicImage(icfile) - meli.MelodicImage(icfilenosuf) + i = meli.MelodicImage(meldir) + i = meli.MelodicImage(icfile) + i = meli.MelodicImage(icfilenosuf) + i = None def test_MelodicImage_atts(): @@ -136,6 +137,8 @@ def 
test_MelodicImage_atts(): assert img.getDataFile() == mela.getDataFile(meldir) assert img.getMeanFile() == mela.getMeanFile(meldir) + img = None + def test_MelodicImage_componentData(): with tests.testdir() as testdir: @@ -150,6 +153,8 @@ def test_MelodicImage_componentData(): assert np.all(img.getComponentTimeSeries( ic) == expectTS[:, ic]) assert np.all(img.getComponentPowerSpectrum(ic) == expectPS[:, ic]) + img = None + def test_MelodicImage_tr(): @@ -160,12 +165,16 @@ def test_MelodicImage_tr(): assert img.tr == 1 + img = None + # Otherwise, it should be set to the datafile tr with tests.testdir() as testdir: meldir = _create_dummy_melodic_analysis(testdir, tr=5) img = meli.MelodicImage(meldir) assert img.tr == 5 + img = None + # The TR can be updated with tests.testdir() as testdir: @@ -182,3 +191,5 @@ def test_MelodicImage_tr(): assert cbCalled[0] assert img.tr == 8 + + img = None diff --git a/tests/test_parse_data.py b/tests/test_parse_data.py index ac2d0e36b885653e2edf6c8d847d81f394348cdd..786a1b7bdea7fe609f6a6762d9835736f95cc810 100644 --- a/tests/test_parse_data.py +++ b/tests/test_parse_data.py @@ -79,6 +79,7 @@ def test_image(): image_parser.parse_args([filename + '.hdr']) with raises(SystemExit): image_parser.parse_args([filename + '.nii.gz']) + args = None double_filename = op.join(testdir, 'image1') make_image(double_filename, 0) diff --git a/tests/test_run.py b/tests/test_run.py index 9a260de5c543b71132b42b6ca716633b8dfbda1f..bc77a46fd14f494a4269ecb90d2749a5bace5ab5 100644 --- a/tests/test_run.py +++ b/tests/test_run.py @@ -27,6 +27,9 @@ import fsl.utils.fslsub as fslsub from . import make_random_image, mockFSLDIR, CaptureStdout +pytestmark = pytest.mark.unixtest + + def mkexec(path, contents): with open(path, 'wt') as f: f.write(contents) diff --git a/tests/test_settings.py b/tests/test_settings.py index 26bdb07bb91932b72d84a4f22385b0f19cdcdec6..ac3a311b4c0454e4f023e92ce8759d9b3b75ce06 100644 --- a/tests/test_settings.py +++ b/tests/test_settings.py @@ -326,6 +326,9 @@ def test_listFiles(): 'namespace2/setting2.txt', 'namespace2/setting3.txt'] + ns1files = [op.join(*f.split('/')) for f in ns1files] + ns2files = [op.join(*f.split('/')) for f in ns2files] + with tests.testdir() as testdir: s = settings.Settings(cfgid='test', cfgdir=testdir, writeOnExit=False) @@ -338,12 +341,11 @@ def test_listFiles(): assert list(sorted(s.listFiles())) == list(sorted(ns1files + ns2files)) - assert list(sorted(s.listFiles('namespace1/*'))) == list(sorted(ns1files)) - assert list(sorted(s.listFiles('namespace2/*'))) == list(sorted(ns2files)) - assert list(sorted(s.listFiles('namespace?/*'))) == list(sorted(ns1files + ns2files)) + assert list(sorted(s.listFiles(op.join('namespace1', '*')))) == list(sorted(ns1files)) + assert list(sorted(s.listFiles(op.join('namespace2', '*')))) == list(sorted(ns2files)) + assert list(sorted(s.listFiles(op.join('namespace?', '*')))) == list(sorted(ns1files + ns2files)) assert list(sorted(s.listFiles('*.txt'))) == list(sorted(ns1files + ns2files)) - - assert list(sorted(s.listFiles('*/setting1.txt'))) == list(sorted([ns1files[0]] + [ns2files[0]])) + assert list(sorted(s.listFiles(op.join('*', 'setting1.txt')))) == list(sorted([ns1files[0]] + [ns2files[0]])) def test_filePath(): @@ -351,6 +353,7 @@ def test_filePath(): testfiles = ['file1.txt', 'dir1/file2.txt', 'dir1/dir2/file3.txt'] + testfiles = [op.join(*f.split('/')) for f in testfiles] with tests.testdir() as testdir: diff --git a/tests/test_wrapperutils.py b/tests/test_wrapperutils.py index 
8173fe1045f5f5aa06a3275e38391064e940c9bb..29e569c9f011cea38e9ceaf0ec1dde42eedd7785 100644 --- a/tests/test_wrapperutils.py +++ b/tests/test_wrapperutils.py @@ -26,7 +26,7 @@ import fsl.data.image as fslimage import fsl.wrappers.wrapperutils as wutils -from . import mockFSLDIR, cleardir, checkdir +from . import mockFSLDIR, cleardir, checkdir, testdir from .test_run import mock_submit @@ -469,20 +469,20 @@ def test_fileOrThing_outprefix_directory(): res = func(img, 'myout', myout_imgs=wutils.LOAD) assert len(res) == 2 - assert np.all(res['myout_imgs/img2'].get_data() == exp2) - assert np.all(res['myout_imgs/img4'].get_data() == exp4) + assert np.all(res[op.join('myout_imgs', 'img2')].get_data() == exp2) + assert np.all(res[op.join('myout_imgs', 'img4')].get_data() == exp4) - res = func(img, 'myout', **{'myout_imgs/img2' : wutils.LOAD}) + res = func(img, 'myout', **{op.join('myout_imgs', 'img2') : wutils.LOAD}) assert len(res) == 1 - assert np.all(res['myout_imgs/img2'].get_data() == exp2) + assert np.all(res[op.join('myout_imgs', 'img2')].get_data() == exp2) - res = func(img, 'myout', **{'myout_imgs/img' : wutils.LOAD}) + res = func(img, 'myout', **{op.join('myout_imgs', 'img') : wutils.LOAD}) assert len(res) == 2 - assert np.all(res['myout_imgs/img2'].get_data() == exp2) - assert np.all(res['myout_imgs/img4'].get_data() == exp4) + assert np.all(res[op.join('myout_imgs', 'img2')].get_data() == exp2) + assert np.all(res[op.join('myout_imgs', 'img4')].get_data() == exp4) os.mkdir('foo') - res = func(img, 'foo/myout') + res = func(img, op.join('foo', 'myout')) assert len(res) == 0 checkdir(td, op.join('foo', 'myout_imgs', 'img2.nii.gz'), @@ -490,10 +490,10 @@ def test_fileOrThing_outprefix_directory(): cleardir(td, 'foo') os.mkdir('foo') - res = func(img, 'foo/myout', **{'foo/myout' : wutils.LOAD}) + res = func(img, op.join('foo', 'myout'), **{op.join('foo', 'myout') : wutils.LOAD}) assert len(res) == 2 - assert np.all(res['foo/myout_imgs/img2'].get_data() == exp2) - assert np.all(res['foo/myout_imgs/img4'].get_data() == exp4) + assert np.all(res[op.join('foo', 'myout_imgs', 'img2')].get_data() == exp2) + assert np.all(res[op.join('foo', 'myout_imgs', 'img4')].get_data() == exp4) def test_chained_fileOrImageAndArray(): @@ -614,6 +614,7 @@ def _test_script_func(a, b): return ['test_script', str(a), str(b)] +@pytest.mark.unixtest def test_cmdwrapper_submit(): test_func = wutils.cmdwrapper(_test_script_func) @@ -637,6 +638,7 @@ def test_cmdwrapper_submit(): assert stderr.strip() == '' +@pytest.mark.unixtest def test_fslwrapper_submit(): test_func = wutils.fslwrapper(_test_script_func)