From ab3aa0cdd76d0de44e5566d6d81acd085546be1d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 21 Apr 2015 14:21:14 +0200 Subject: [PATCH 01/23] New dipy interfaces --- nipype/interfaces/dipy/__init__.py | 3 +- nipype/interfaces/dipy/base.py | 38 +++ nipype/interfaces/dipy/preprocess.py | 12 +- nipype/interfaces/dipy/reconstruction.py | 369 +++++++++++++++++++++++ nipype/interfaces/dipy/setup.py | 6 +- nipype/interfaces/dipy/tensors.py | 17 +- nipype/interfaces/dipy/tracks.py | 312 +++++++++++++++---- 7 files changed, 690 insertions(+), 67 deletions(-) create mode 100644 nipype/interfaces/dipy/base.py create mode 100644 nipype/interfaces/dipy/reconstruction.py diff --git a/nipype/interfaces/dipy/__init__.py b/nipype/interfaces/dipy/__init__.py index a147648d26..bcc756192d 100644 --- a/nipype/interfaces/dipy/__init__.py +++ b/nipype/interfaces/dipy/__init__.py @@ -1,3 +1,4 @@ -from .tracks import TrackDensityMap +from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode from .preprocess import Resample, Denoise +from .reconstruction import RESTORE, EstimateResponseSH, CSD diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py new file mode 100644 index 0000000000..8529fbef59 --- /dev/null +++ b/nipype/interfaces/dipy/base.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + + +class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): + in_file = File(exists=True, mandatory=True, desc=('input diffusion data')) + in_bval = File(exists=True, mandatory=True, desc=('input b-values table')) + in_bvec = File(exists=True, mandatory=True, desc=('input b-vectors table')) + out_prefix = traits.Str(desc=('output prefix for file names')) + + +class DipyBaseInterface(BaseInterface): + + """ + A base interface for py:mod:`dipy` computations + """ + input_spec = DipyBaseInterfaceInputSpec + + def _get_gradient_table(self): + gtab = GradientTable(np.loadtxt(self.inputs.in_bvec).T) + gtab.b0_threshold = 700 + gtab.bvals = np.loadtxt(self.inputs.in_bval) + return gtab + + def _gen_filename(self, name, ext=None): + fname, fext = op.splitext(op.basename(self.inputs.in_file)) + if fext == '.gz': + fname, fext2 = op.splitext(fname) + fext = fext2 + fext + + if not isdefined(self.inputs.out_prefix): + out_prefix = op.abspath(fname) + else: + out_prefix = self.inputs.out_prefix + + if ext is None: + ext = fext + + return out_prefix + '_' + name + ext diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 9a2119a86b..a379a9c42e 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -"""Change directory to provide relative paths for doctests +""" +Change directory to provide relative paths for doctests >>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) @@ -36,8 +37,9 @@ class ResampleInputSpec(TraitedSpec): ' is set, then isotropic regridding will ' 'be performed, with spacing equal to the ' 'smallest current zoom.')) - interp = traits.Int(1, mandatory=True, usedefault=True, desc=('order of ' - 'the interpolator (0 = nearest, 1 = linear, etc.')) + interp = traits.Int( + 1, mandatory=True, usedefault=True, + desc=('order of the interpolator (0 = nearest, 1 = linear, etc.')) class ResampleOutputSpec(TraitedSpec): @@ -45,6 +47,7 @@ class ResampleOutputSpec(TraitedSpec): class Resample(BaseInterface): + """ An interface to 
reslicing diffusion datasets. See @@ -106,6 +109,7 @@ class DenoiseOutputSpec(TraitedSpec): class Denoise(BaseInterface): + """ An interface to denoising diffusion datasets [Coupe2008]_. See @@ -152,7 +156,7 @@ def _run_interface(self, runtime): noise_mask=noise_mask, out_file=out_file) iflogger.info(('Denoised image saved as {i}, estimated ' - 'sigma={s}').format(i=out_file, s=s)) + 'sigma={s}').format(i=out_file, s=s)) return runtime def _list_outputs(self): diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py new file mode 100644 index 0000000000..3c8b798c38 --- /dev/null +++ b/nipype/interfaces/dipy/reconstruction.py @@ -0,0 +1,369 @@ + +# -*- coding: utf-8 -*- +import os +import os.path as op + +import numpy as np +from dipy.core.gradients import GradientTable +import nibabel as nb + +from nipype.interfaces.base import (TraitedSpec, File, InputMultiPath, + OutputMultiPath, Undefined, traits, + isdefined, OutputMultiPath, + CommandLineInputSpec, CommandLine, + BaseInterface, BaseInterfaceInputSpec, + traits) +from nipype.utils.filemanip import split_filename, fname_presuffix + +from .base import DipyBaseInterface + +from nipype import logging +iflogger = logging.getLogger('interface') + + +class RESTOREInputSpec(DipyBaseInterfaceInputSpec): + in_mask = File(exists=True, desc=('input mask in which compute tensors')) + noise_mask = File( + exists=True, desc=('input mask in which compute noise variance')) + + +class RESTOREOutputSpec(TraitedSpec): + fa = File(desc=('output fractional anisotropy (FA) map computed from ' + 'the fitted DTI')) + md = File(desc=('output mean diffusivity (MD) map computed from the ' + 'fitted DTI')) + rd = File(desc=('output radial diffusivity (RD) map computed from ' + 'the fitted DTI')) + mode = File(desc=('output mode (MO) map computed from the fitted DTI')) + trace = File(desc=('output the tensor trace map computed from the ' + 'fitted DTI')) + evals = File(desc=('output the eigenvalues of the fitted DTI')) + evecs = File(desc=('output the eigenvectors of the fitted DTI')) + # out_dti = File(desc=('output DTI file')) + + +class RESTORE(DipyBaseInterface): + + """ + Uses RESTORE [Chang2005]_ to perform DTI fitting with outlier detection. + The interface uses :py:mod:`dipy`, as explained in `dipy's documentation`_. + + .. [Chang2005] Chang, LC, Jones, DK and Pierpaoli, C. RESTORE: robust \ + estimation of tensors by outlier rejection. MRM, 53:1088-95, (2005). + + .. 
_dipy's documentation: \ + http://nipy.org/dipy/examples_built/restore_dti.html + + + Example + ------- + + >>> import pysdcev.interfaces.reconstruction as pir + >>> dti = pir.RESTORE() + >>> dti.inputs.in_file = 'dwi.nii' + >>> dti.inputs.in_bval = 'bval.txt' + >>> dti.inputs.in_bvec = 'bvec.txt' + >>> res = dti.run() # doctest: +SKIP + + + """ + input_spec = RESTOREInputSpec + output_spec = RESTOREOutputSpec + + def _run_interface(self, runtime): + from scipy.special import gamma + from dipy.reconst.dti import TensorModel + import gc + + img = nb.load(self.inputs.in_file) + hdr = img.get_header().copy() + affine = img.get_affine() + data = img.get_data() + gtab = self._get_gradient_table() + + if isdefined(self.inputs.in_mask): + msk = nb.load(self.inputs.in_mask).get_data().astype(np.uint8) + else: + msk = np.ones(data.shape[:3], dtype=np.uint8) + + try_b0 = True + if isdefined(self.inputs.noise_mask): + noise_msk = nb.load(self.inputs.noise_mask).get_data().reshape(-1) + noise_msk[noise_msk > 0.5] = 1 + noise_msk[noise_msk < 1.0] = 0 + noise_msk = noise_msk.astype(np.uint8) + try_b0 = False + elif np.all(data[msk == 0, 0] == 0): + iflogger.info('Input data are masked.') + noise_msk = msk.reshape(-1).astype(np.uint8) + else: + noise_msk = (1 - msk).reshape(-1).astype(np.uint8) + + nb0 = np.sum(gtab.b0s_mask) + dsample = data.reshape(-1, data.shape[-1]) + + if try_b0 and (nb0 > 1): + noise_data = dsample.take(np.where(gtab.b0s_mask), + axis=-1)[noise_msk == 0, ...] + n = nb0 + else: + nodiff = np.where(~gtab.b0s_mask) + nodiffidx = nodiff[0].tolist() + n = 20 if len(nodiffidx) >= 20 else len(nodiffidx) + idxs = np.random.choice(nodiffidx, size=n, replace=False) + noise_data = dsample.take(idxs, axis=-1)[noise_msk == 1, ...] + + # Estimate sigma required by RESTORE + mean_std = np.median(noise_data.std(-1)) + try: + bias = (1. - np.sqrt(2. / (n - 1)) * + (gamma(n / 2.) / gamma((n - 1) / 2.))) + except: + bias = .0 + pass + + sigma = mean_std * (1 + bias) + + if sigma == 0: + iflogger.warn( + ('Noise std is 0.0, looks like data was masked and noise' + ' cannot be estimated correctly. 
Using default tensor ' + 'model instead of RESTORE.')) + dti = TensorModel(gtab) + else: + iflogger.info(('Performing RESTORE with noise std=%.4f.') % sigma) + dti = TensorModel(gtab, fit_method='RESTORE', sigma=sigma) + + try: + fit_restore = dti.fit(data, msk) + except TypeError as e: + dti = TensorModel(gtab) + fit_restore = dti.fit(data, msk) + + hdr.set_data_dtype(np.float32) + hdr['data_type'] = 16 + + for k in self._outputs().get(): + scalar = getattr(fit_restore, k) + hdr.set_data_shape(np.shape(scalar)) + nb.Nifti1Image(scalar.astype(np.float32), + affine, hdr).to_filename(self._gen_filename(k)) + + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + for k in outputs.keys(): + outputs[k] = self._gen_filename(k) + return outputs + + +class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): + in_evals = File( + exists=True, mandatory=True, desc=('input eigenvalues file')) + in_mask = File( + exists=True, desc=('input mask in which we find single fibers')) + fa_thresh = traits.Float( + 0.7, usedefault=True, desc=('default FA threshold')) + save_glyph = traits.Bool(True, mandatory=True, usedefault=True, + desc=('save a png file of the response')) + response = File(desc=('the output response file')) + + +class EstimateResponseSHOutputSpec(TraitedSpec): + response = File(desc=('the response file')) + glyph_file = File(desc='graphical representation of the response') + + +class EstimateResponseSH(DipyBaseInterface): + + """ + Uses dipy to compute the single fiber response to be used in spherical + deconvolution methods, in a similar way to MRTrix's command + ``estimate_response``. + + + Example + ------- + + >>> import pysdcev.interfaces.reconstruction as pir + >>> dti = pir.EstimateResponseSH() + >>> dti.inputs.in_file = 'dwi.nii' + >>> dti.inputs.in_bval = 'bval.txt' + >>> dti.inputs.in_bvec = 'bvec.txt' + >>> dti.inputs.in_evals = 'dwi_evals.nii' + >>> res = dti.run() # doctest: +SKIP + + + """ + input_spec = EstimateResponseSHInputSpec + output_spec = EstimateResponseSHOutputSpec + + def _run_interface(self, runtime): + from dipy.core.gradients import GradientTable + from dipy.reconst.csdeconv import fractional_anisotropy + + img = nb.load(self.inputs.in_file) + affine = img.get_affine() + + if isdefined(self.inputs.in_mask): + msk = nb.load(self.inputs.in_mask).get_data() + msk[msk > 0] = 1 + msk[msk < 0] = 0 + else: + msk = np.ones(imref.get_shape()) + + data = img.get_data().astype(np.float32) + gtab = self._get_gradient_table() + + evals = nb.load(self.inputs.in_evals).get_data() + FA = fractional_anisotropy(evals) + FA[np.isnan(FA)] = 0 + FA[msk != 1] = 0 + + indices = np.where(FA > self.inputs.fa_thresh) + + lambdas = evals[indices][:, :2] + S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] + S0 = np.mean(S0s) + l01 = np.mean(lambdas, axis=0) + respev = np.array([l01[0], l01[1], l01[1]]) + response = (respev, S0) + ratio = respev[1] / respev[0] + + if abs(ratio - 0.2) > 0.1: + iflogger.warn(('Estimated response is not prolate enough. 
' + 'Ratio=%0.3f.') % ratio) + + np.savetxt(self._gen_outname(), + np.array(respev.tolist() + [S0]).reshape(-1)) + + if isdefined(self.inputs.save_glyph) and self.inputs.save_glyph: + from dipy.viz import fvtk + from dipy.data import get_sphere + from dipy.sims.voxel import single_tensor_odf + + ren = fvtk.ren() + evecs = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]]).T + sphere = get_sphere('symmetric724') + response_odf = single_tensor_odf(sphere.vertices, respev, evecs) + response_actor = fvtk.sphere_funcs(response_odf, sphere) + fvtk.add(ren, response_actor) + fvtk.record(ren, out_path=self._gen_outname() + '.png', + size=(200, 200)) + fvtk.rm(ren, response_actor) + return runtime + + def _gen_outname(self): + if isdefined(self.inputs.response): + return self.inputs.response + else: + fname, fext = op.splitext(op.basename(self.inputs.in_file)) + if fext == '.gz': + fname, fext2 = op.splitext(fname) + fext = fext2 + fext + return op.abspath(fname) + '_response.txt' + return out_file + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['response'] = self._gen_outname() + + if isdefined(self.inputs.save_glyph) and self.inputs.save_glyph: + outputs['glyph_file'] = self._gen_outname() + '.png' + + return outputs + + +class CSDInputSpec(DipyBaseInterfaceInputSpec): + in_mask = File(exists=True, desc=('input mask in which compute tensors')) + response = File(exists=True, desc=('single fiber estimated response')) + sh_order = traits.Int(8, exists=True, usedefault=True, + desc=('maximal shperical harmonics order')) + save_fods = traits.Bool(True, exists=True, usedefault=True, + desc=('save fODFs in file')) + out_fods = File(desc=('fODFs output file name')) + + +class CSDOutputSpec(TraitedSpec): + model = File(desc='Python pickled object of the CSD model fitted.') + out_fods = File(desc=('fODFs output file name')) + + +class CSD(DipyBaseInterface): + + """ + Uses CSD [Tournier2007]_ to generate the fODF of DWIs. The interface uses + :py:mod:`dipy`, as explained in `dipy's CSD example + `_. + + .. [Tournier2007] Tournier, J.D., et al. NeuroImage 2007. + Robust determination of the fibre orientation distribution in diffusion + MRI: Non-negativity constrained super-resolved spherical deconvolution + + + Example + ------- + + >>> import pysdcev.interfaces.reconstruction as pir + >>> csd = pir.CSD() + >>> csd.inputs.in_file = 'dwi.nii' + >>> csd.inputs.in_bval = 'bval.txt' + >>> csd.inputs.in_bvec = 'bvec.txt' + >>> res = csd.run() # doctest: +SKIP + """ + input_spec = CSDInputSpec + output_spec = CSDOutputSpec + + def _run_interface(self, runtime): + from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel + from dipy.data import get_sphere + import cPickle as pickle + import gzip + + img = nb.load(self.inputs.in_file) + imref = nb.four_to_three(img)[0] + affine = img.get_affine() + + if isdefined(self.inputs.in_mask): + msk = nb.load(self.inputs.in_mask).get_data() + else: + msk = np.ones(imref.get_shape()) + + data = img.get_data().astype(np.float32) + hdr = imref.get_header().copy() + + gtab = self._get_gradient_table() + resp_file = np.loadtxt(self.inputs.response) + + response = (np.array(resp_file[0:3]), resp_file[-1]) + ratio = response[0][1] / response[0][0] + + if abs(ratio - 0.2) > 0.1: + iflogger.warn(('Estimated response is not prolate enough. 
' + 'Ratio=%0.3f.') % ratio) + + csd_model = ConstrainedSphericalDeconvModel( + gtab, response, sh_order=self.inputs.sh_order) + + iflogger.info('Fitting CSD model') + csd_fit = csd_model.fit(data, msk) + + f = gzip.open(self._gen_filename('csdmodel', ext='.pklz'), 'wb') + pickle.dump(csd_model, f, -1) + f.close() + + if self.inputs.save_fods: + sphere = get_sphere('symmetric724') + fods = csd_fit.odf(sphere) + nb.Nifti1Image(fods.astype(np.float32), img.get_affine(), + None).to_filename(self._gen_filename('fods')) + + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['model'] = self._gen_filename('csdmodel', ext='.pklz') + if self.inputs.save_fods: + outputs['out_fods'] = self._gen_filename('fods') + return outputs diff --git a/nipype/interfaces/dipy/setup.py b/nipype/interfaces/dipy/setup.py index f15c2ab086..6a7b5a30a4 100644 --- a/nipype/interfaces/dipy/setup.py +++ b/nipype/interfaces/dipy/setup.py @@ -1,11 +1,13 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -def configuration(parent_package='',top_path=None): + + +def configuration(parent_package='', top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('dipy', parent_package, top_path) - #config.add_data_dir('tests') + # config.add_data_dir('tests') return config if __name__ == '__main__': diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index aa7caf92de..2edd98f4dc 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -44,6 +44,7 @@ class TensorModeOutputSpec(TraitedSpec): class TensorMode(BaseInterface): + """ Creates a map of the mode of the diffusion tensors given a set of diffusion-weighted images, as well as their associated b-values and @@ -69,31 +70,31 @@ class TensorMode(BaseInterface): output_spec = TensorModeOutputSpec def _run_interface(self, runtime): - ## Load the 4D image files + # Load the 4D image files img = nb.load(self.inputs.in_file) data = img.get_data() affine = img.get_affine() - ## Load the gradient strengths and directions + # Load the gradient strengths and directions bvals = np.loadtxt(self.inputs.bvals) gradients = np.loadtxt(self.inputs.bvecs).T - ## Place in Dipy's preferred format + # Place in Dipy's preferred format gtab = GradientTable(gradients) gtab.bvals = bvals - ## Mask the data so that tensors are not fit for - ## unnecessary voxels + # Mask the data so that tensors are not fit for + # unnecessary voxels mask = data[..., 0] > 50 - ## Fit the tensors to the data + # Fit the tensors to the data tenmodel = dti.TensorModel(gtab) tenfit = tenmodel.fit(data, mask) - ## Calculate the mode of each voxel's tensor + # Calculate the mode of each voxel's tensor mode_data = tenfit.mode - ## Write as a 3D Nifti image with the original affine + # Write as a 3D Nifti image with the original affine img = nb.Nifti1Image(mode_data, affine) out_file = op.abspath(self._gen_outfilename()) nb.save(img, out_file) diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 1b820d1c89..84e6612691 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -5,11 +5,13 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from nipype.interfaces.base import (TraitedSpec, BaseInterface, BaseInterfaceInputSpec, - File, isdefined, traits) +from nipype.interfaces.base import ( + TraitedSpec, BaseInterface, 
BaseInterfaceInputSpec, File, isdefined, + traits) from nipype.utils.filemanip import split_filename import os.path as op -import nibabel as nb, nibabel.trackvis as trk +import nibabel as nb +import nibabel.trackvis as trk from nipype.utils.misc import package_check import warnings @@ -27,60 +29,266 @@ class TrackDensityMapInputSpec(TraitedSpec): in_file = File(exists=True, mandatory=True, - desc='The input TrackVis track file') + desc='The input TrackVis track file') voxel_dims = traits.List(traits.Float, minlen=3, maxlen=3, - desc='The size of each voxel in mm.') + desc='The size of each voxel in mm.') data_dims = traits.List(traits.Int, minlen=3, maxlen=3, - desc='The size of the image in voxels.') - out_filename = File('tdi.nii', usedefault=True, desc='The output filename for the tracks in TrackVis (.trk) format') + desc='The size of the image in voxels.') + out_filename = File('tdi.nii', usedefault=True, + desc=('The output filename for the tracks in TrackVis' + ' (.trk) format')) + class TrackDensityMapOutputSpec(TraitedSpec): out_file = File(exists=True) + class TrackDensityMap(BaseInterface): - """ - Creates a tract density image from a TrackVis track file using functions from dipy - - Example - ------- - - >>> import nipype.interfaces.dipy as dipy - >>> trk2tdi = dipy.TrackDensityMap() - >>> trk2tdi.inputs.in_file = 'converted.trk' - >>> trk2tdi.run() # doctest: +SKIP - """ - input_spec = TrackDensityMapInputSpec - output_spec = TrackDensityMapOutputSpec - - def _run_interface(self, runtime): - tracks, header = trk.read(self.inputs.in_file) - if not isdefined(self.inputs.data_dims): - data_dims = header['dim'] - else: - data_dims = self.inputs.data_dims - - if not isdefined(self.inputs.voxel_dims): - voxel_size = header['voxel_size'] - else: - voxel_size = self.inputs.voxel_dims - - affine = header['vox_to_ras'] - - streams = ((ii[0]) for ii in tracks) - data = density_map(streams, data_dims, voxel_size) - if data.max() < 2**15: - data = data.astype('int16') - - img = nb.Nifti1Image(data,affine) - out_file = op.abspath(self.inputs.out_filename) - nb.save(img, out_file) - iflogger.info('Track density map saved as {i}'.format(i=out_file)) - iflogger.info('Data Dimensions {d}'.format(d=data_dims)) - iflogger.info('Voxel Dimensions {v}'.format(v=voxel_size)) - return runtime - - def _list_outputs(self): - outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_filename) - return outputs + """ + Creates a tract density image from a TrackVis track file using dipy + + Example + ------- + + >>> import nipype.interfaces.dipy as dipy + >>> trk2tdi = dipy.TrackDensityMap() + >>> trk2tdi.inputs.in_file = 'converted.trk' + >>> trk2tdi.run() # doctest: +SKIP + """ + input_spec = TrackDensityMapInputSpec + output_spec = TrackDensityMapOutputSpec + + def _run_interface(self, runtime): + tracks, header = trk.read(self.inputs.in_file) + if not isdefined(self.inputs.data_dims): + data_dims = header['dim'] + else: + data_dims = self.inputs.data_dims + + if not isdefined(self.inputs.voxel_dims): + voxel_size = header['voxel_size'] + else: + voxel_size = self.inputs.voxel_dims + + affine = header['vox_to_ras'] + + streams = ((ii[0]) for ii in tracks) + data = density_map(streams, data_dims, voxel_size) + if data.max() < 2 ** 15: + data = data.astype('int16') + + img = nb.Nifti1Image(data, affine) + out_file = op.abspath(self.inputs.out_filename) + nb.save(img, out_file) + iflogger.info('Track density map saved as {i}'.format(i=out_file)) + iflogger.info('Data Dimensions 
{d}'.format(d=data_dims)) + iflogger.info('Voxel Dimensions {v}'.format(v=voxel_size)) + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['out_file'] = op.abspath(self.inputs.out_filename) + return outputs + + +class StreamlineTractographyInputSpec(BaseInterfaceInputSpec): + in_file = File(exists=True, mandatory=True, desc=('input diffusion data')) + in_model = File(exists=True, desc=('input f/d-ODF model extracted from.')) + tracking_mask = File(exists=True, + desc=('input mask within which perform tracking')) + seed_mask = File(exists=True, + desc=('input mask within which perform seeding')) + in_peaks = File(exists=True, desc=('peaks computed from the odf')) + seed_coord = File(exists=True, + desc=('file containing the list of seed voxel ' + 'coordinates (N,3)')) + gfa_thresh = traits.Float(0.2, mandatory=True, usedefault=True, + desc=('GFA threshold to compute tracking mask')) + peak_threshold = traits.Float( + 0.5, mandatory=True, usedefault=True, + desc=('threshold to consider peaks from model')) + min_angle = traits.Float(25.0, mandatory=True, usedefault=True, + desc=('minimum separation angle')) + multiprocess = traits.Bool(True, mandatory=True, usedefault=True, + desc=('use multiprocessing')) + save_seeds = traits.Bool(False, mandatory=True, usedefault=True, + desc=('save seeding voxels coordinates')) + num_seeds = traits.Int(10000, mandatory=True, usedefault=True, + desc=('desired number of tracks in tractography')) + out_prefix = traits.Str(desc=('output prefix for file names')) + + +class StreamlineTractographyOutputSpec(TraitedSpec): + tracks = File(desc='TrackVis file containing extracted streamlines') + gfa = File(desc=('The resulting GFA (generalized FA) computed using the ' + 'peaks of the ODF')) + odf_peaks = File(desc=('peaks computed from the odf')) + out_seeds = File(desc=('file containing the (N,3) *voxel* coordinates used' + ' in seeding.')) + + +class StreamlineTractography(BaseInterface): + + """ + Streamline tractography using EuDX [Garyfallidis12]_. + + .. 
[Garyfallidis12] Garyfallidis E., “Towards an accurate brain + tractography”, PhD thesis, University of Cambridge, 2012 + + Example + ------- + + >>> import pysdcev.interfaces.reconstruction as pir + >>> track = pir.StreamlineTractography() + >>> track.inputs.in_file = 'dwi.nii' + >>> track.inputs.in_model = 'model.pklz' + >>> track.inputs.tracking_mask = 'dilated_wm_mask.nii' + >>> res = track.run() # doctest: +SKIP + """ + input_spec = StreamlineTractographyInputSpec + output_spec = StreamlineTractographyOutputSpec + + def _run_interface(self, runtime): + from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel + from dipy.reconst.peaks import peaks_from_model + from dipy.tracking.eudx import EuDX + from dipy.data import get_sphere + import cPickle as pickle + import gzip + + if (not (isdefined(self.inputs.in_model) or + isdefined(self.inputs.in_peaks))): + raise RuntimeError(('At least one of in_model or in_peaks should ' + 'be supplied')) + + img = nb.load(self.inputs.in_file) + imref = nb.four_to_three(img)[0] + affine = img.get_affine() + + data = img.get_data().astype(np.float32) + hdr = imref.get_header().copy() + hdr.set_data_dtype(np.float32) + hdr['data_type'] = 16 + + sphere = get_sphere('symmetric724') + + self._save_peaks = False + if isdefined(self.inputs.in_peaks): + iflogger.info('Peaks file found, skipping ODF peaks search...') + f = gzip.open(self.inputs.in_peaks, 'rb') + peaks = pickle.load(f) + f.close() + else: + self._save_peaks = True + iflogger.info('Loading model and computing ODF peaks') + f = gzip.open(self.inputs.in_model, 'rb') + odf_model = pickle.load(f) + f.close() + + peaks = peaks_from_model( + model=odf_model, + data=data, + sphere=sphere, + relative_peak_threshold=self.inputs.peak_threshold, + min_separation_angle=self.inputs.min_angle, + parallel=self.inputs.multiprocess) + + f = gzip.open(self._gen_filename('peaks', ext='.pklz'), 'wb') + pickle.dump(peaks, f, -1) + f.close() + + hdr.set_data_shape(peaks.gfa.shape) + nb.Nifti1Image(peaks.gfa.astype(np.float32), affine, + hdr).to_filename(self._gen_filename('gfa')) + + iflogger.info('Performing tractography') + + if isdefined(self.inputs.tracking_mask): + msk = nb.load(self.inputs.tracking_mask).get_data() + msk[msk > 0] = 1 + msk[msk < 0] = 0 + else: + msk = np.ones(imref.get_shape()) + + gfa = peaks.gfa * msk + seeds = self.inputs.num_seeds + + if isdefined(self.inputs.seed_coord): + seeds = np.loadtxt(self.inputs.seed_coord) + + elif isdefined(self.inputs.seed_mask): + seedmsk = nb.load(self.inputs.seed_mask).get_data() + assert(seedmsk.shape == data.shape[:3]) + seedmsk[seedmsk > 0] = 1 + seedmsk[seedmsk < 1] = 0 + seedps = np.array(np.where(seedmsk == 1), dtype=np.float32).T + vseeds = seedps.shape[0] + nsperv = (seeds // vseeds) + 1 + iflogger.info(('Seed mask is provided (%d voxels inside ' + 'mask), computing seeds (%d seeds/voxel).') % + (vseeds, nsperv)) + if nsperv > 1: + iflogger.info(('Needed %d seeds per selected voxel ' + '(total %d).') % (nsperv, vseeds)) + seedps = np.vstack(np.array([seedps] * nsperv)) + voxcoord = seedps + np.random.uniform(-1, 1, size=seedps.shape) + nseeds = voxcoord.shape[0] + seeds = affine.dot(np.vstack((voxcoord.T, + np.ones((1, nseeds)))))[:3, :].T + + if self.inputs.save_seeds: + np.savetxt(self._gen_filename('seeds', ext='.txt'), seeds) + + if isdefined(self.inputs.tracking_mask): + tmask = msk + a_low = 0.1 + else: + tmask = gfa + a_low = self.inputs.gfa_thresh + + eu = EuDX(tmask, + peaks.peak_indices[..., 0], + seeds=seeds, + affine=affine, + 
odf_vertices=sphere.vertices, + a_low=a_low) + + ss_mm = [np.array(s) for s in eu] + + trkfilev = nb.trackvis.TrackvisFile([(s, None, None) for s in ss_mm], + points_space='rasmm', + affine=np.eye(4)) + trkfilev.to_file(self._gen_filename('tracked', ext='.trk')) + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['tracks'] = self._gen_filename('tracked', ext='.trk') + outputs['gfa'] = self._gen_filename('gfa') + if self._save_peaks: + outputs['odf_peaks'] = self._gen_filename('peaks', ext='.pklz') + if self.inputs.save_seeds: + if isdefined(self.inputs.seed_coord): + outputs['out_seeds'] = self.inputs.seed_coord + else: + outputs['out_seeds'] = self._gen_filename('seeds', + ext='.txt') + + return outputs + + def _gen_filename(self, name, ext=None): + fname, fext = op.splitext(op.basename(self.inputs.in_file)) + if fext == '.gz': + fname, fext2 = op.splitext(fname) + fext = fext2 + fext + + if not isdefined(self.inputs.out_prefix): + out_prefix = op.abspath(fname) + else: + out_prefix = self.inputs.out_prefix + + if ext is None: + ext = fext + + return out_prefix + '_' + name + ext From 8b8712ef404cd9d1504a27076abb9560ac876515 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 21 Apr 2015 14:44:43 +0200 Subject: [PATCH 02/23] fix doctests --- nipype/interfaces/base.py | 6 +- nipype/interfaces/dipy/base.py | 2 + nipype/interfaces/dipy/reconstruction.py | 21 ++++--- nipype/interfaces/dipy/tests/test_auto_CSD.py | 41 ++++++++++++++ .../dipy/tests/test_auto_DipyBaseInterface.py | 22 ++++++++ .../tests/test_auto_EstimateResponseSH.py | 41 ++++++++++++++ .../dipy/tests/test_auto_RESTORE.py | 39 +++++++++++++ .../tests/test_auto_StreamlineTractography.py | 53 ++++++++++++++++++ nipype/interfaces/dipy/tracks.py | 2 +- nipype/testing/data/dilated_wm_mask.nii | 0 nipype/testing/data/dwi_evals.nii | 0 nipype/testing/data/model.pklz | 0 nipype/testing/data/von-ray_errmap.nii.gz | Bin 107 -> 107 bytes nipype/testing/data/von_errmap.nii.gz | Bin 96 -> 96 bytes nipype/utils/nipype_cmd.py | 14 ++--- nipype/utils/tests/test_cmd.py | 28 ++++----- 16 files changed, 233 insertions(+), 36 deletions(-) create mode 100644 nipype/interfaces/dipy/tests/test_auto_CSD.py create mode 100644 nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py create mode 100644 nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py create mode 100644 nipype/interfaces/dipy/tests/test_auto_RESTORE.py create mode 100644 nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py create mode 100644 nipype/testing/data/dilated_wm_mask.nii create mode 100644 nipype/testing/data/dwi_evals.nii create mode 100644 nipype/testing/data/model.pklz diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 7917d6e852..81100b9fbd 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -1349,9 +1349,9 @@ def _terminal_output_update(self): def set_default_terminal_output(cls, output_type): """Set the default terminal output for CommandLine Interfaces. - This method is used to set default terminal output for - CommandLine Interfaces. However, setting this will not - update the output type for any existing instances. For these, + This method is used to set default terminal output for + CommandLine Interfaces. However, setting this will not + update the output type for any existing instances. For these, assign the .inputs.terminal_output. 
""" diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 8529fbef59..03a0ea49e1 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from nipype.interfaces.base import (traits, File, isdefined, + BaseInterface, BaseInterfaceInputSpec) class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 3c8b798c38..2354d8b7af 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -15,7 +15,7 @@ traits) from nipype.utils.filemanip import split_filename, fname_presuffix -from .base import DipyBaseInterface +from .base import DipyBaseInterface, DipyBaseInterfaceInputSpec from nipype import logging iflogger = logging.getLogger('interface') @@ -39,7 +39,6 @@ class RESTOREOutputSpec(TraitedSpec): 'fitted DTI')) evals = File(desc=('output the eigenvalues of the fitted DTI')) evecs = File(desc=('output the eigenvectors of the fitted DTI')) - # out_dti = File(desc=('output DTI file')) class RESTORE(DipyBaseInterface): @@ -60,9 +59,9 @@ class RESTORE(DipyBaseInterface): >>> import pysdcev.interfaces.reconstruction as pir >>> dti = pir.RESTORE() - >>> dti.inputs.in_file = 'dwi.nii' - >>> dti.inputs.in_bval = 'bval.txt' - >>> dti.inputs.in_bvec = 'bvec.txt' + >>> dti.inputs.in_file = '4d_dwi.nii' + >>> dti.inputs.in_bval = 'bvals' + >>> dti.inputs.in_bvec = 'bvecs' >>> res = dti.run() # doctest: +SKIP @@ -188,9 +187,9 @@ class EstimateResponseSH(DipyBaseInterface): >>> import pysdcev.interfaces.reconstruction as pir >>> dti = pir.EstimateResponseSH() - >>> dti.inputs.in_file = 'dwi.nii' - >>> dti.inputs.in_bval = 'bval.txt' - >>> dti.inputs.in_bvec = 'bvec.txt' + >>> dti.inputs.in_file = '4d_dwi.nii' + >>> dti.inputs.in_bval = 'bvals' + >>> dti.inputs.in_bvec = 'bvecs' >>> dti.inputs.in_evals = 'dwi_evals.nii' >>> res = dti.run() # doctest: +SKIP @@ -307,9 +306,9 @@ class CSD(DipyBaseInterface): >>> import pysdcev.interfaces.reconstruction as pir >>> csd = pir.CSD() - >>> csd.inputs.in_file = 'dwi.nii' - >>> csd.inputs.in_bval = 'bval.txt' - >>> csd.inputs.in_bvec = 'bvec.txt' + >>> csd.inputs.in_file = '4d_dwi.nii' + >>> csd.inputs.in_bval = 'bvals' + >>> csd.inputs.in_bvec = 'bvecs' >>> res = csd.run() # doctest: +SKIP """ input_spec = CSDInputSpec diff --git a/nipype/interfaces/dipy/tests/test_auto_CSD.py b/nipype/interfaces/dipy/tests/test_auto_CSD.py new file mode 100644 index 0000000000..541a1c5a86 --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_auto_CSD.py @@ -0,0 +1,41 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.dipy.reconstruction import CSD + +def test_CSD_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, + ), + in_file=dict(mandatory=True, + ), + in_mask=dict(), + out_fods=dict(), + out_prefix=dict(), + response=dict(), + save_fods=dict(exists=True, + usedefault=True, + ), + sh_order=dict(exists=True, + usedefault=True, + ), + ) + inputs = CSD.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + +def test_CSD_outputs(): + output_map = dict(model=dict(), + out_fods=dict(), + ) + outputs = CSD.output_spec() + + for key, metadata in output_map.items(): + for 
metakey, value in metadata.items(): + yield assert_equal, getattr(outputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py new file mode 100644 index 0000000000..9a5eb30830 --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py @@ -0,0 +1,22 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.dipy.base import DipyBaseInterface + +def test_DipyBaseInterface_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, + ), + in_file=dict(mandatory=True, + ), + out_prefix=dict(), + ) + inputs = DipyBaseInterface.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py new file mode 100644 index 0000000000..8c4d6d694c --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py @@ -0,0 +1,41 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.dipy.reconstruction import EstimateResponseSH + +def test_EstimateResponseSH_inputs(): + input_map = dict(fa_thresh=dict(usedefault=True, + ), + ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, + ), + in_evals=dict(mandatory=True, + ), + in_file=dict(mandatory=True, + ), + in_mask=dict(), + out_prefix=dict(), + response=dict(), + save_glyph=dict(mandatory=True, + usedefault=True, + ), + ) + inputs = EstimateResponseSH.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + +def test_EstimateResponseSH_outputs(): + output_map = dict(glyph_file=dict(), + response=dict(), + ) + outputs = EstimateResponseSH.output_spec() + + for key, metadata in output_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(outputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py new file mode 100644 index 0000000000..a09fdd855a --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py @@ -0,0 +1,39 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.dipy.reconstruction import RESTORE + +def test_RESTORE_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, + ), + in_file=dict(mandatory=True, + ), + in_mask=dict(), + noise_mask=dict(), + out_prefix=dict(), + ) + inputs = RESTORE.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + +def test_RESTORE_outputs(): + output_map = dict(evals=dict(), + evecs=dict(), + fa=dict(), + md=dict(), + mode=dict(), + rd=dict(), + trace=dict(), + ) + outputs = RESTORE.output_spec() + + for key, metadata in output_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, 
getattr(outputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py new file mode 100644 index 0000000000..0a910010c4 --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py @@ -0,0 +1,53 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.dipy.tracks import StreamlineTractography + +def test_StreamlineTractography_inputs(): + input_map = dict(gfa_thresh=dict(mandatory=True, + usedefault=True, + ), + ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_file=dict(mandatory=True, + ), + in_model=dict(), + in_peaks=dict(), + min_angle=dict(mandatory=True, + usedefault=True, + ), + multiprocess=dict(mandatory=True, + usedefault=True, + ), + num_seeds=dict(mandatory=True, + usedefault=True, + ), + out_prefix=dict(), + peak_threshold=dict(mandatory=True, + usedefault=True, + ), + save_seeds=dict(mandatory=True, + usedefault=True, + ), + seed_coord=dict(), + seed_mask=dict(), + tracking_mask=dict(), + ) + inputs = StreamlineTractography.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + +def test_StreamlineTractography_outputs(): + output_map = dict(gfa=dict(), + odf_peaks=dict(), + out_seeds=dict(), + tracks=dict(), + ) + outputs = StreamlineTractography.output_spec() + + for key, metadata in output_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(outputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 84e6612691..30450aa1e5 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -141,7 +141,7 @@ class StreamlineTractography(BaseInterface): >>> import pysdcev.interfaces.reconstruction as pir >>> track = pir.StreamlineTractography() - >>> track.inputs.in_file = 'dwi.nii' + >>> track.inputs.in_file = '4d_dwi.nii' >>> track.inputs.in_model = 'model.pklz' >>> track.inputs.tracking_mask = 'dilated_wm_mask.nii' >>> res = track.run() # doctest: +SKIP diff --git a/nipype/testing/data/dilated_wm_mask.nii b/nipype/testing/data/dilated_wm_mask.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/dwi_evals.nii b/nipype/testing/data/dwi_evals.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/model.pklz b/nipype/testing/data/model.pklz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/von-ray_errmap.nii.gz b/nipype/testing/data/von-ray_errmap.nii.gz index de7d3985d377326cb89a150464b003a61b902ddf..8d262b99bb61a706e94bfbb172aacb7e09e8948d 100644 GIT binary patch delta 14 Vcmd1K=8*5^;HYvj3!TW33;-N>1Kt1t delta 14 Vcmd1K=8*5^;8=g+dB{YLWB?>|1xEk? 
diff --git a/nipype/testing/data/von_errmap.nii.gz b/nipype/testing/data/von_errmap.nii.gz index 8e79a46d128e3181c2aaa8c32b84fbd0902476bc..e1d170e882f5d35cdec1c9a1da95a92c8c9fde40 100644 GIT binary patch delta 14 VcmYdD;E?a;;HYvj3!TUj0RS6B1HAwM delta 14 VcmYdD;E?a;;8=g+dB{YL2mm8M1ttIh diff --git a/nipype/utils/nipype_cmd.py b/nipype/utils/nipype_cmd.py index 2b514e54d8..e2ae758354 100644 --- a/nipype/utils/nipype_cmd.py +++ b/nipype/utils/nipype_cmd.py @@ -19,7 +19,7 @@ def add_options(parser=None, module=None, function=None): if parser and module and function: __import__(module) interface = getattr(sys.modules[module],function)() - + inputs = interface.input_spec() for name, spec in sorted(interface.inputs.traits(transient=None).items()): desc = "\n".join(interface._get_trait_desc(inputs, name, spec))[len(name)+2:] @@ -33,7 +33,7 @@ def add_options(parser=None, module=None, function=None): def run_instance(interface, options): if interface: print "setting function inputs" - + for input_name, _ in interface.inputs.items(): if getattr(options, input_name) != None: value = getattr(options, input_name) @@ -48,23 +48,23 @@ def run_instance(interface, options): value) except ValueError, e: print "Error when setting the value of %s: '%s'"%(input_name, str(e)) - + print interface.inputs res = interface.run() - print res.outputs + print res.outputs def main(argv): - + if len(argv) == 2 and not argv[1].startswith("-"): listClasses(argv[1]) sys.exit(0) - + parser = argparse.ArgumentParser(description='Nipype interface runner', prog=argv[0]) parser.add_argument("module", type=str, help="Module name") parser.add_argument("interface", type=str, help="Interface name") parsed = parser.parse_args(args=argv[1:3]) - + _, prog = os.path.split(argv[0]) interface_parser = argparse.ArgumentParser(description="Run %s"%parsed.interface, prog=" ".join([prog] + argv[1:3])) interface_parser, interface = add_options(interface_parser, parsed.module, parsed.interface) diff --git a/nipype/utils/tests/test_cmd.py b/nipype/utils/tests/test_cmd.py index 55347ff4b9..894569ded3 100644 --- a/nipype/utils/tests/test_cmd.py +++ b/nipype/utils/tests/test_cmd.py @@ -22,24 +22,24 @@ def test_main_returns_2_on_empty(self): with self.assertRaises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(['nipype_cmd']) - + exit_exception = cm.exception self.assertEqual(exit_exception.code, 2) - - self.assertEqual(stderr.getvalue(), + + self.assertEqual(stderr.getvalue(), """usage: nipype_cmd [-h] module interface nipype_cmd: error: too few arguments """) self.assertEqual(stdout.getvalue(), '') - + def test_main_returns_0_on_help(self): with self.assertRaises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(['nipype_cmd', '-h']) - + exit_exception = cm.exception self.assertEqual(exit_exception.code, 0) - + self.assertEqual(stderr.getvalue(), '') self.assertEqual(stdout.getvalue(), """usage: nipype_cmd [-h] module interface @@ -53,15 +53,15 @@ def test_main_returns_0_on_help(self): optional arguments: -h, --help show this help message and exit """) - + def test_list_nipy_interfacesp(self): with self.assertRaises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(['nipype_cmd', 'nipype.interfaces.nipy']) - + exit_exception = cm.exception self.assertEqual(exit_exception.code, 0) - + self.assertEqual(stderr.getvalue(), '') self.assertEqual(stdout.getvalue(), """Available Interfaces: @@ -77,10 +77,10 @@ def test_run_4d_realign_without_arguments(self): 
with self.assertRaises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(['nipype_cmd', 'nipype.interfaces.nipy', 'FmriRealign4d']) - + exit_exception = cm.exception self.assertEqual(exit_exception.code, 2) - + self.assertEqual(stderr.getvalue(), """usage: nipype_cmd nipype.interfaces.nipy FmriRealign4d [-h] [--between_loops BETWEEN_LOOPS] @@ -95,15 +95,15 @@ def test_run_4d_realign_without_arguments(self): nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: too few arguments """) self.assertEqual(stdout.getvalue(), '') - + def test_run_4d_realign_help(self): with self.assertRaises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(['nipype_cmd', 'nipype.interfaces.nipy', 'FmriRealign4d', '-h']) - + exit_exception = cm.exception self.assertEqual(exit_exception.code, 0) - + self.assertEqual(stderr.getvalue(), '') self.assertTrue("Run FmriRealign4d" in stdout.getvalue()) From 7256dc70532574d5326ac2484a884b83e0ae5ee9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 21 Apr 2015 14:48:51 +0200 Subject: [PATCH 03/23] removed old code from doctests --- nipype/interfaces/dipy/reconstruction.py | 12 ++++++------ nipype/interfaces/dipy/tracks.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 2354d8b7af..1bfb427506 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -57,8 +57,8 @@ class RESTORE(DipyBaseInterface): Example ------- - >>> import pysdcev.interfaces.reconstruction as pir - >>> dti = pir.RESTORE() + >>> from nipype.interfaces import dipy as ndp + >>> dti = ndp.RESTORE() >>> dti.inputs.in_file = '4d_dwi.nii' >>> dti.inputs.in_bval = 'bvals' >>> dti.inputs.in_bvec = 'bvecs' @@ -185,8 +185,8 @@ class EstimateResponseSH(DipyBaseInterface): Example ------- - >>> import pysdcev.interfaces.reconstruction as pir - >>> dti = pir.EstimateResponseSH() + >>> from nipype.interfaces import dipy as ndp + >>> dti = ndp.EstimateResponseSH() >>> dti.inputs.in_file = '4d_dwi.nii' >>> dti.inputs.in_bval = 'bvals' >>> dti.inputs.in_bvec = 'bvecs' @@ -304,8 +304,8 @@ class CSD(DipyBaseInterface): Example ------- - >>> import pysdcev.interfaces.reconstruction as pir - >>> csd = pir.CSD() + >>> from nipype.interfaces import dipy as ndp + >>> csd = ndp.CSD() >>> csd.inputs.in_file = '4d_dwi.nii' >>> csd.inputs.in_bval = 'bvals' >>> csd.inputs.in_bvec = 'bvecs' diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 30450aa1e5..2917a07021 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -139,8 +139,8 @@ class StreamlineTractography(BaseInterface): Example ------- - >>> import pysdcev.interfaces.reconstruction as pir - >>> track = pir.StreamlineTractography() + >>> from nipype.interfaces import dipy as ndp + >>> track = ndp.StreamlineTractography() >>> track.inputs.in_file = '4d_dwi.nii' >>> track.inputs.in_model = 'model.pklz' >>> track.inputs.tracking_mask = 'dilated_wm_mask.nii' From 905e47975561a10e4342375913ec3c0810296ea4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 21 Apr 2015 14:53:47 +0200 Subject: [PATCH 04/23] update CHANGES --- CHANGES | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES b/CHANGES index 6ea2883c72..c2406b5b30 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,8 @@ Next release ============ +* ENH: New interfaces in dipy: RESTORE, EstimateResponseSH, CSD and 
StreamlineTractography + (https://github.com/nipy/nipype/pull/1090) * ENH: New mesh.MeshWarpMaths to operate on surface-defined warpings (https://github.com/nipy/nipype/pull/1016) * FIX: Refactor P2PDistance, change name to ComputeMeshWarp, add regression tests, From 7d05e3c6017202b380aacf0c0eb7354d9bebefd1 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 22 Apr 2015 18:40:00 +0200 Subject: [PATCH 05/23] improve gradient table building --- nipype/interfaces/dipy/base.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 03a0ea49e1..6c539758a0 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -7,6 +7,7 @@ class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc=('input diffusion data')) in_bval = File(exists=True, mandatory=True, desc=('input b-values table')) in_bvec = File(exists=True, mandatory=True, desc=('input b-vectors table')) + b0_thres = traits.Int(700, usedefault=True, desc=('b0 threshold')) out_prefix = traits.Str(desc=('output prefix for file names')) @@ -18,9 +19,17 @@ class DipyBaseInterface(BaseInterface): input_spec = DipyBaseInterfaceInputSpec def _get_gradient_table(self): - gtab = GradientTable(np.loadtxt(self.inputs.in_bvec).T) - gtab.b0_threshold = 700 - gtab.bvals = np.loadtxt(self.inputs.in_bval) + bval = np.loadtxt(self.inputs.in_bval) + bvec = np.loadtxt(self.inputs.in_bvec).T + try: + from dipy.data import GradientTable + gtab = GradientTable(bvec) + gtab.bvals = bval + except NameError: + from dipy.core.gradients import gradient_table + gtab = gradient_table(bval, bvec) + + gtab.b0_threshold = self.inputs.b0_thres return gtab def _gen_filename(self, name, ext=None): From 04c34f3cdef190d0700ecb63e4d33693ccdd68eb Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 22 Apr 2015 18:44:36 +0200 Subject: [PATCH 06/23] add missing imports --- nipype/interfaces/dipy/base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 6c539758a0..0d29b3f371 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import numpy as np +import os.path as op from nipype.interfaces.base import (traits, File, isdefined, BaseInterface, BaseInterfaceInputSpec) From 1ed1a675ae7ebe3a7f3d9885880c0a8e3550528a Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 23 Apr 2015 17:18:31 +0200 Subject: [PATCH 07/23] replace cPickle with marshal --- nipype/interfaces/dipy/reconstruction.py | 4 ++-- nipype/interfaces/dipy/tracks.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 1bfb427506..ff1ba9fdc0 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- import os import os.path as op @@ -317,7 +316,8 @@ class CSD(DipyBaseInterface): def _run_interface(self, runtime): from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.data import get_sphere - import cPickle as pickle + import marshal as pickle + # import cPickle as pickle import gzip img = nb.load(self.inputs.in_file) diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 2917a07021..92ff2005b5 100644 --- a/nipype/interfaces/dipy/tracks.py +++ 
b/nipype/interfaces/dipy/tracks.py @@ -150,11 +150,11 @@ class StreamlineTractography(BaseInterface): output_spec = StreamlineTractographyOutputSpec def _run_interface(self, runtime): - from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.reconst.peaks import peaks_from_model from dipy.tracking.eudx import EuDX from dipy.data import get_sphere - import cPickle as pickle + import marshal as pickle + # import cPickle as pickle import gzip if (not (isdefined(self.inputs.in_model) or From d630f975ca532227acfd4d2ca2455d15652c7d46 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 23 Apr 2015 17:45:13 +0200 Subject: [PATCH 08/23] save_glyph should be False by default --- nipype/interfaces/dipy/reconstruction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index ff1ba9fdc0..bc1f21569c 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -163,7 +163,7 @@ class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): exists=True, desc=('input mask in which we find single fibers')) fa_thresh = traits.Float( 0.7, usedefault=True, desc=('default FA threshold')) - save_glyph = traits.Bool(True, mandatory=True, usedefault=True, + save_glyph = traits.Bool(False, usedefault=True, desc=('save a png file of the response')) response = File(desc=('the output response file')) @@ -236,7 +236,7 @@ def _run_interface(self, runtime): np.savetxt(self._gen_outname(), np.array(respev.tolist() + [S0]).reshape(-1)) - if isdefined(self.inputs.save_glyph) and self.inputs.save_glyph: + if self.inputs.save_glyph: from dipy.viz import fvtk from dipy.data import get_sphere from dipy.sims.voxel import single_tensor_odf From 8673d31a80bb519a23b090db20aa91d7b5ef0650 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 24 Apr 2015 10:57:43 +0200 Subject: [PATCH 09/23] extend noise mask to number of time steps --- nipype/interfaces/dipy/preprocess.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index a379a9c42e..88cdfb4239 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -230,7 +230,8 @@ def nlmeans_proxy(in_file, settings, nmask = data[..., 0] > 80 if noise_mask is not None: - nmask = noise_mask > 0 + nmask = np.array([noise_mask] * data.shape[-1]) + nmask[nmask > 0] = 1 sigma = np.std(data[nmask == 1]) den = nlmeans(data, sigma, **settings) From 71f1723910d2798f28ae8b33d6775fc7d97616df Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 24 Apr 2015 11:03:42 +0200 Subject: [PATCH 10/23] extend noise mask to number of time steps (ammend) --- nipype/interfaces/dipy/preprocess.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 88cdfb4239..bedd68d71a 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -230,7 +230,10 @@ def nlmeans_proxy(in_file, settings, nmask = data[..., 0] > 80 if noise_mask is not None: - nmask = np.array([noise_mask] * data.shape[-1]) + nmask = nb.load(noise_mask).get_data() + if nmask.ndim != data.ndim: + nmask = np.array([nmask] * data.shape[-1]) + nmask[nmask > 0] = 1 sigma = np.std(data[nmask == 1]) From eca025148cddf3dcf8e742a096a8ea26a87a289f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 24 Apr 2015 11:22:44 +0200 
Subject: [PATCH 11/23] extend noise mask to number of time steps (ammend 2) --- nipype/interfaces/dipy/preprocess.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index bedd68d71a..de98513b95 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -230,9 +230,8 @@ def nlmeans_proxy(in_file, settings, nmask = data[..., 0] > 80 if noise_mask is not None: - nmask = nb.load(noise_mask).get_data() - if nmask.ndim != data.ndim: - nmask = np.array([nmask] * data.shape[-1]) + if noise_mask.ndim != data.ndim: + nmask = np.array([noise_mask] * data.shape[-1]) nmask[nmask > 0] = 1 From 916b305e1ea8deeb72cb52d3fe980191ba807d87 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 25 Apr 2015 20:01:20 +0200 Subject: [PATCH 12/23] fixing noise_mask in Denoise --- nipype/interfaces/dipy/preprocess.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index de98513b95..b59267d8ad 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -230,12 +230,13 @@ def nlmeans_proxy(in_file, settings, nmask = data[..., 0] > 80 if noise_mask is not None: - if noise_mask.ndim != data.ndim: - nmask = np.array([noise_mask] * data.shape[-1]) + noise_mask = np.squeeze(noise_mask) + nmask = np.zeros_like(noise_mask) + nmask[noise_mask > 0] = 1 + if nmask.ndim != data.ndim: + nmask = np.array([nmask] * data.shape[-1]) - nmask[nmask > 0] = 1 - - sigma = np.std(data[nmask == 1]) + sigma = np.std(data[nmask > 0]) den = nlmeans(data, sigma, **settings) nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, From db6fdf93d015f0410b049e84d4464d2051ae056c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 28 Apr 2015 00:58:39 +0200 Subject: [PATCH 13/23] update denoise, improve estimation --- nipype/interfaces/dipy/preprocess.py | 61 +++++++++++++------ nipype/interfaces/dipy/tests/test_auto_CSD.py | 4 +- .../dipy/tests/test_auto_Denoise.py | 1 + .../dipy/tests/test_auto_DipyBaseInterface.py | 4 +- .../tests/test_auto_EstimateResponseSH.py | 7 ++- .../dipy/tests/test_auto_RESTORE.py | 4 +- 6 files changed, 57 insertions(+), 24 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index b59267d8ad..c17e2eac29 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -98,6 +98,8 @@ class DenoiseInputSpec(TraitedSpec): noise_model = traits.Enum('rician', 'gaussian', mandatory=True, usedefault=True, desc=('noise distribution model')) + signal_mask = File(desc=('mask in which the mean signal ' + 'will be computed'), exists=True) noise_mask = File(desc=('mask in which the standard deviation of noise ' 'will be computed'), exists=True) patch_radius = traits.Int(1, desc='patch radius') @@ -147,16 +149,19 @@ def _run_interface(self, runtime): if isdefined(self.inputs.block_radius): settings['block_radius'] = self.inputs.block_radius + signal_mask = None + if isdefined(self.inputs.signal_mask): + signal_mask = nb.load(self.inputs.signal_mask).get_data() noise_mask = None - if isdefined(self.inputs.in_mask): + if isdefined(self.inputs.noise_mask): noise_mask = nb.load(self.inputs.noise_mask).get_data() - _, s = nlmeans_proxy(self.inputs.in_file, - settings, - noise_mask=noise_mask, + _, s = nlmeans_proxy(self.inputs.in_file, settings, + smask=signal_mask, + nmask=noise_mask, 
out_file=out_file) iflogger.info(('Denoised image saved as {i}, estimated ' - 'sigma={s}').format(i=out_file, s=s)) + 'SNR={s}').format(i=out_file, s=str(s))) return runtime def _list_outputs(self): @@ -209,7 +214,9 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): def nlmeans_proxy(in_file, settings, - noise_mask=None, out_file=None): + smask=None, + nmask=None, + out_file=None): """ Uses non-local means to denoise 4D datasets """ @@ -228,17 +235,35 @@ def nlmeans_proxy(in_file, settings, data = img.get_data() aff = img.get_affine() - nmask = data[..., 0] > 80 - if noise_mask is not None: - noise_mask = np.squeeze(noise_mask) - nmask = np.zeros_like(noise_mask) - nmask[noise_mask > 0] = 1 - if nmask.ndim != data.ndim: - nmask = np.array([nmask] * data.shape[-1]) - - sigma = np.std(data[nmask > 0]) - den = nlmeans(data, sigma, **settings) - + if data.ndims < 4: + data = data[..., np.newaxis] + b0 = data[..., 0] + + if smask is None: + smask = np.zeros_like(b0) + smask[b0 > np.percentile(b0, 0.85)] = 1 + + if nmask is None: + nmask = np.zeros_like(b0) + try: + bmask = settings['mask'] + nmask[~bmask] = 1 + except AttributeError: + nmask[b0 < np.percentile(b0, 0.15)] = 1 + else: + nmask = np.squeeze(nmask) + nmask[nmask > 0] = 1 + + den = np.zeros_like(data) + snr = [] + for i in range(data.shape[-1]): + d = data[..., i] + s = np.mean(d[smask > 0]) + n = np.std(d[nmask > 0]) + snr.append(s/n) + den[..., i] = nlmeans(d, s/n, **settings) + + den = np.squeeze(den) nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, hdr).to_filename(out_file) - return out_file, sigma + return out_file, snr diff --git a/nipype/interfaces/dipy/tests/test_auto_CSD.py b/nipype/interfaces/dipy/tests/test_auto_CSD.py index 541a1c5a86..f2339fed2a 100644 --- a/nipype/interfaces/dipy/tests/test_auto_CSD.py +++ b/nipype/interfaces/dipy/tests/test_auto_CSD.py @@ -3,7 +3,9 @@ from nipype.interfaces.dipy.reconstruction import CSD def test_CSD_inputs(): - input_map = dict(ignore_exception=dict(nohash=True, + input_map = dict(b0_thres=dict(usedefault=True, + ), + ignore_exception=dict(nohash=True, usedefault=True, ), in_bval=dict(mandatory=True, diff --git a/nipype/interfaces/dipy/tests/test_auto_Denoise.py b/nipype/interfaces/dipy/tests/test_auto_Denoise.py index 60f284137b..4e70b4bd0a 100644 --- a/nipype/interfaces/dipy/tests/test_auto_Denoise.py +++ b/nipype/interfaces/dipy/tests/test_auto_Denoise.py @@ -12,6 +12,7 @@ def test_Denoise_inputs(): usedefault=True, ), patch_radius=dict(), + signal_mask=dict(), ) inputs = Denoise.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py index 9a5eb30830..4b0fed9022 100644 --- a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py +++ b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py @@ -3,7 +3,9 @@ from nipype.interfaces.dipy.base import DipyBaseInterface def test_DipyBaseInterface_inputs(): - input_map = dict(ignore_exception=dict(nohash=True, + input_map = dict(b0_thres=dict(usedefault=True, + ), + ignore_exception=dict(nohash=True, usedefault=True, ), in_bval=dict(mandatory=True, diff --git a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py index 8c4d6d694c..117fb15ee2 100644 --- a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py +++ b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py @@ -3,7 +3,9 @@ from nipype.interfaces.dipy.reconstruction import 
EstimateResponseSH def test_EstimateResponseSH_inputs(): - input_map = dict(fa_thresh=dict(usedefault=True, + input_map = dict(b0_thres=dict(usedefault=True, + ), + fa_thresh=dict(usedefault=True, ), ignore_exception=dict(nohash=True, usedefault=True, @@ -19,8 +21,7 @@ def test_EstimateResponseSH_inputs(): in_mask=dict(), out_prefix=dict(), response=dict(), - save_glyph=dict(mandatory=True, - usedefault=True, + save_glyph=dict(usedefault=True, ), ) inputs = EstimateResponseSH.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py index a09fdd855a..7156bb26dc 100644 --- a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py +++ b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py @@ -3,7 +3,9 @@ from nipype.interfaces.dipy.reconstruction import RESTORE def test_RESTORE_inputs(): - input_map = dict(ignore_exception=dict(nohash=True, + input_map = dict(b0_thres=dict(usedefault=True, + ), + ignore_exception=dict(nohash=True, usedefault=True, ), in_bval=dict(mandatory=True, From 05209eb8d9295ad85183293b0d7ec42e699603ac Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 28 Apr 2015 03:04:44 +0200 Subject: [PATCH 14/23] improved masks generation --- nipype/interfaces/dipy/preprocess.py | 38 +++++++++++++++++++++------- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index c17e2eac29..e87783341a 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -222,6 +222,8 @@ def nlmeans_proxy(in_file, settings, """ package_check('dipy', version='0.8.0.dev') from dipy.denoise.nlmeans import nlmeans + from scipy.ndimage.morphology import binary_erosion + from scipy import ndimage if out_file is None: fname, fext = op.splitext(op.basename(in_file)) @@ -235,24 +237,42 @@ def nlmeans_proxy(in_file, settings, data = img.get_data() aff = img.get_affine() - if data.ndims < 4: + if data.ndim < 4: data = data[..., np.newaxis] b0 = data[..., 0] if smask is None: smask = np.zeros_like(b0) - smask[b0 > np.percentile(b0, 0.85)] = 1 + smask[b0 > np.percentile(b0, 85.)] = 1 + + smask = binary_erosion(smask.astype(np.uint8), iterations=2).astype(np.uint8) if nmask is None: - nmask = np.zeros_like(b0) - try: - bmask = settings['mask'] - nmask[~bmask] = 1 - except AttributeError: - nmask[b0 < np.percentile(b0, 0.15)] = 1 + nmask = np.ones_like(b0, dtype=np.uint8) + bmask = settings['mask'] + if bmask is None: + bmask = np.zeros_like(b0) + bmask[b0 > np.percentile(b0, 55)] = 1 + label_im, nb_labels = ndimage.label(bmask) + sizes = ndimage.sum(bmask, label_im, range(nb_labels + 1)) + maxidx = np.argmax(sizes) + bmask = np.zeros_like(b0, dtype=np.uint8) + bmask[label_im == maxidx] = 1 + + nb.Nifti1Image(bmask, aff, + None).to_filename('bmask.nii.gz') + nmask[bmask > 0] = 0 else: nmask = np.squeeze(nmask) - nmask[nmask > 0] = 1 + nmask[nmask > 0.0] = 1 + nmask[nmask < 1] = 0 + nmask = nmask.astype(bool) + + nmask = binary_erosion(nmask, iterations=1).astype(np.uint8) + + nb.Nifti1Image(smask.astype(np.uint8), aff, + None).to_filename('smask.nii.gz') + den = np.zeros_like(data) snr = [] From d38be936a387a3d4ec8892d6b4e887965c314ed4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 6 May 2015 17:18:47 +0200 Subject: [PATCH 15/23] improvements on snr computation for Denoise --- nipype/interfaces/dipy/preprocess.py | 42 +++++++++++++++++++--------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git 
a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index e87783341a..38f721876e 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -104,6 +104,7 @@ class DenoiseInputSpec(TraitedSpec): 'will be computed'), exists=True) patch_radius = traits.Int(1, desc='patch radius') block_radius = traits.Int(5, desc='block_radius') + snr = traits.Float(desc='manually set an SNR') class DenoiseOutputSpec(TraitedSpec): @@ -239,28 +240,37 @@ def nlmeans_proxy(in_file, settings, if data.ndim < 4: data = data[..., np.newaxis] + + data = np.nan_to_num(data) + + if data.max() < 1.0e-4: + raise RuntimeError('There is no signal in the image') + + df = 1.0 + if data.max() < 1000.0: + df = 1000. / data.max() + data *= df + b0 = data[..., 0] if smask is None: smask = np.zeros_like(b0) smask[b0 > np.percentile(b0, 85.)] = 1 - smask = binary_erosion(smask.astype(np.uint8), iterations=2).astype(np.uint8) + smask = binary_erosion( + smask.astype(np.uint8), iterations=2).astype(np.uint8) if nmask is None: nmask = np.ones_like(b0, dtype=np.uint8) bmask = settings['mask'] if bmask is None: bmask = np.zeros_like(b0) - bmask[b0 > np.percentile(b0, 55)] = 1 + bmask[b0 > np.percentile(b0[b0 > 0], 10)] = 1 label_im, nb_labels = ndimage.label(bmask) sizes = ndimage.sum(bmask, label_im, range(nb_labels + 1)) maxidx = np.argmax(sizes) bmask = np.zeros_like(b0, dtype=np.uint8) bmask[label_im == maxidx] = 1 - - nb.Nifti1Image(bmask, aff, - None).to_filename('bmask.nii.gz') nmask[bmask > 0] = 0 else: nmask = np.squeeze(nmask) @@ -270,20 +280,26 @@ def nlmeans_proxy(in_file, settings, nmask = binary_erosion(nmask, iterations=1).astype(np.uint8) - nb.Nifti1Image(smask.astype(np.uint8), aff, - None).to_filename('smask.nii.gz') - - den = np.zeros_like(data) snr = [] + + est_snr = True + if isdefined(self.inputs.snr): + snr = [self.inputs.snr] * data.shape[-1] + est_snr = False + for i in range(data.shape[-1]): d = data[..., i] - s = np.mean(d[smask > 0]) - n = np.std(d[nmask > 0]) - snr.append(s/n) - den[..., i] = nlmeans(d, s/n, **settings) + if est_snr: + s = np.mean(d[smask > 0]) + n = np.std(d[nmask > 0]) + snr.append(s / n) + + den[..., i] = nlmeans(d, snr[i], **settings) den = np.squeeze(den) + den /= df + nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, hdr).to_filename(out_file) return out_file, snr From 9982f49aabc5ab7572ca937e0d4edba47d01547a Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 6 May 2015 17:36:12 +0200 Subject: [PATCH 16/23] fix error --- nipype/interfaces/dipy/preprocess.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 38f721876e..931a936fbf 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -150,6 +150,10 @@ def _run_interface(self, runtime): if isdefined(self.inputs.block_radius): settings['block_radius'] = self.inputs.block_radius + snr = None + if isdefined(self.inputs.snr): + snr = self.inputs.snr + signal_mask = None if isdefined(self.inputs.signal_mask): signal_mask = nb.load(self.inputs.signal_mask).get_data() @@ -158,6 +162,7 @@ def _run_interface(self, runtime): noise_mask = nb.load(self.inputs.noise_mask).get_data() _, s = nlmeans_proxy(self.inputs.in_file, settings, + snr=snr, smask=signal_mask, nmask=noise_mask, out_file=out_file) @@ -215,6 +220,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): def nlmeans_proxy(in_file, settings, + 
snr=None, smask=None, nmask=None, out_file=None): @@ -281,12 +287,13 @@ def nlmeans_proxy(in_file, settings, nmask = binary_erosion(nmask, iterations=1).astype(np.uint8) den = np.zeros_like(data) - snr = [] est_snr = True - if isdefined(self.inputs.snr): + if snr is not None: snr = [self.inputs.snr] * data.shape[-1] est_snr = False + else: + snr = [] for i in range(data.shape[-1]): d = data[..., i] From cd7bb7643bbe88ed0d85a71bcd684882b6090fbc Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 7 May 2015 17:09:25 +0200 Subject: [PATCH 17/23] remove marshal to pickle --- nipype/interfaces/dipy/preprocess.py | 2 +- nipype/interfaces/dipy/reconstruction.py | 4 ++-- nipype/interfaces/dipy/tracks.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 931a936fbf..b350f99572 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -290,7 +290,7 @@ def nlmeans_proxy(in_file, settings, est_snr = True if snr is not None: - snr = [self.inputs.snr] * data.shape[-1] + snr = [snr] * data.shape[-1] est_snr = False else: snr = [] diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index bc1f21569c..3d249b4832 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -316,8 +316,8 @@ class CSD(DipyBaseInterface): def _run_interface(self, runtime): from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.data import get_sphere - import marshal as pickle - # import cPickle as pickle + # import marshal as pickle + import cPickle as pickle import gzip img = nb.load(self.inputs.in_file) diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 92ff2005b5..8f68b95788 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -153,8 +153,8 @@ def _run_interface(self, runtime): from dipy.reconst.peaks import peaks_from_model from dipy.tracking.eudx import EuDX from dipy.data import get_sphere - import marshal as pickle - # import cPickle as pickle + # import marshal as pickle + import cPickle as pickle import gzip if (not (isdefined(self.inputs.in_model) or From 465ad27914a34e697ddb093a141cb3c458d2a7c0 Mon Sep 17 00:00:00 2001 From: oesteban Date: Tue, 12 May 2015 12:24:24 +0200 Subject: [PATCH 18/23] add recursive estimation of signal response --- nipype/interfaces/dipy/reconstruction.py | 101 ++++++++++------------- 1 file changed, 45 insertions(+), 56 deletions(-) diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 3d249b4832..572f8bfbdc 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -163,14 +163,17 @@ class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): exists=True, desc=('input mask in which we find single fibers')) fa_thresh = traits.Float( 0.7, usedefault=True, desc=('default FA threshold')) - save_glyph = traits.Bool(False, usedefault=True, - desc=('save a png file of the response')) - response = File(desc=('the output response file')) + recursive = traits.Bool( + False, usedefault=True, + desc='use the recursive response estimator from dipy') + response = File( + 'response.txt', usedefault=True, desc=('the output response file')) + out_mask = File('wm_mask.nii.gz', usedefault=True, desc='computed wm mask') class EstimateResponseSHOutputSpec(TraitedSpec): - response = File(desc=('the 
response file')) - glyph_file = File(desc='graphical representation of the response') + response = File(exists=True, desc=('the response file')) + out_mask = File(exists=True, desc=('output wm mask')) class EstimateResponseSH(DipyBaseInterface): @@ -199,7 +202,8 @@ class EstimateResponseSH(DipyBaseInterface): def _run_interface(self, runtime): from dipy.core.gradients import GradientTable - from dipy.reconst.csdeconv import fractional_anisotropy + from dipy.reconst.dti import fractional_anisotropy, mean_diffusivity + from dipy.reconst.csdeconv import recursive_response img = nb.load(self.inputs.in_file) affine = img.get_affine() @@ -215,61 +219,46 @@ def _run_interface(self, runtime): gtab = self._get_gradient_table() evals = nb.load(self.inputs.in_evals).get_data() - FA = fractional_anisotropy(evals) - FA[np.isnan(FA)] = 0 - FA[msk != 1] = 0 - - indices = np.where(FA > self.inputs.fa_thresh) - - lambdas = evals[indices][:, :2] - S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] - S0 = np.mean(S0s) - l01 = np.mean(lambdas, axis=0) - respev = np.array([l01[0], l01[1], l01[1]]) - response = (respev, S0) - ratio = respev[1] / respev[0] - - if abs(ratio - 0.2) > 0.1: - iflogger.warn(('Estimated response is not prolate enough. ' - 'Ratio=%0.3f.') % ratio) - - np.savetxt(self._gen_outname(), - np.array(respev.tolist() + [S0]).reshape(-1)) - - if self.inputs.save_glyph: - from dipy.viz import fvtk - from dipy.data import get_sphere - from dipy.sims.voxel import single_tensor_odf + FA = np.nan_to_num(fractional_anisotropy(evals)) * msk + + if not self.inputs.recursive: + indices = np.where(FA > self.inputs.fa_thresh) + lambdas = evals[indices][:, :2] + S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] + S0 = np.mean(S0s) + l01 = np.mean(lambdas, axis=0) + respev = np.array([l01[0], l01[1], l01[1]]) + response = np.array(respev.tolist() + [S0]).reshape(-1) + + ratio = abs(respev[1] / respev[0]) + if ratio > 0.25: + iflogger.warn(('Estimated response is not prolate enough. 
' + 'Ratio=%0.3f.') % ratio) + else: + MD = np.nan_to_num(mean_diffusivity(evals)) * msk + indices = np.logical_or( + FA >= 0.4, (np.logical_and(FA >= 0.15, MD >= 0.0011))) + data = nb.load(self.inputs.in_file).get_data() + response = recursive_response(gtab, data, mask=indices, sh_order=8, + peak_thr=0.01, init_fa=0.08, + init_trace=0.0021, iter=8, + convergence=0.001, + parallel=True) + + np.savetxt(op.abspath(self.inputs.response), response) + + wm_mask = np.zeros_like(FA) + wm_mask[indices] = 1 + nb.Nifti1Image( + wm_mask.astype(np.uint8), affine, + None).to_filename(op.abspath(self.inputs.out_mask)) - ren = fvtk.ren() - evecs = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]]).T - sphere = get_sphere('symmetric724') - response_odf = single_tensor_odf(sphere.vertices, respev, evecs) - response_actor = fvtk.sphere_funcs(response_odf, sphere) - fvtk.add(ren, response_actor) - fvtk.record(ren, out_path=self._gen_outname() + '.png', - size=(200, 200)) - fvtk.rm(ren, response_actor) return runtime - def _gen_outname(self): - if isdefined(self.inputs.response): - return self.inputs.response - else: - fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': - fname, fext2 = op.splitext(fname) - fext = fext2 + fext - return op.abspath(fname) + '_response.txt' - return out_file - def _list_outputs(self): outputs = self._outputs().get() - outputs['response'] = self._gen_outname() - - if isdefined(self.inputs.save_glyph) and self.inputs.save_glyph: - outputs['glyph_file'] = self._gen_outname() + '.png' - + outputs['response'] = op.abspath(self.inputs.response) + outputs['out_mask'] = op.abspath(self.inputs.out_mask) return outputs From 1e026e997cf60a75a87eb6c40ea8635d3b469fb3 Mon Sep 17 00:00:00 2001 From: oesteban Date: Mon, 25 May 2015 11:50:51 +0200 Subject: [PATCH 19/23] several improvements in Estimator interface --- nipype/interfaces/dipy/reconstruction.py | 56 +++++++++++++++--------- 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 572f8bfbdc..4ff351a59e 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -162,9 +162,14 @@ class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): in_mask = File( exists=True, desc=('input mask in which we find single fibers')) fa_thresh = traits.Float( - 0.7, usedefault=True, desc=('default FA threshold')) + 0.7, usedefault=True, desc=('FA threshold')) + roi_radius = traits.Int( + 10, usedefault=True, desc=('ROI radius to be used in auto_response')) + auto = traits.Bool( + True, usedefault=True, xor=['recursive'], + desc='use the auto_response estimator from dipy') recursive = traits.Bool( - False, usedefault=True, + False, usedefault=True, xor=['auto'], desc='use the recursive response estimator from dipy') response = File( 'response.txt', usedefault=True, desc=('the output response file')) @@ -203,7 +208,7 @@ class EstimateResponseSH(DipyBaseInterface): def _run_interface(self, runtime): from dipy.core.gradients import GradientTable from dipy.reconst.dti import fractional_anisotropy, mean_diffusivity - from dipy.reconst.csdeconv import recursive_response + from dipy.reconst.csdeconv import recursive_response, auto_response img = nb.load(self.inputs.in_file) affine = img.get_affine() @@ -218,23 +223,18 @@ def _run_interface(self, runtime): data = img.get_data().astype(np.float32) gtab = self._get_gradient_table() - evals = nb.load(self.inputs.in_evals).get_data() + evals = 
np.nan_to_num(nb.load(self.inputs.in_evals).get_data()) FA = np.nan_to_num(fractional_anisotropy(evals)) * msk - - if not self.inputs.recursive: - indices = np.where(FA > self.inputs.fa_thresh) - lambdas = evals[indices][:, :2] - S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] - S0 = np.mean(S0s) - l01 = np.mean(lambdas, axis=0) - respev = np.array([l01[0], l01[1], l01[1]]) - response = np.array(respev.tolist() + [S0]).reshape(-1) - - ratio = abs(respev[1] / respev[0]) - if ratio > 0.25: - iflogger.warn(('Estimated response is not prolate enough. ' - 'Ratio=%0.3f.') % ratio) - else: + indices = np.where(FA > self.inputs.fa_thresh) + S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] + S0 = np.mean(S0s) + + if self.inputs.auto: + response, ratio = auto_response(gtab, data, + roi_radius=self.inputs.roi_radius, + fa_thr=self.inputs.fa_thresh) + response = response[0].tolist() + [S0] + elif self.inputs.recursive: MD = np.nan_to_num(mean_diffusivity(evals)) * msk indices = np.logical_or( FA >= 0.4, (np.logical_and(FA >= 0.15, MD >= 0.0011))) @@ -244,6 +244,23 @@ def _run_interface(self, runtime): init_trace=0.0021, iter=8, convergence=0.001, parallel=True) + ratio = abs(response[1] / response[0]) + else: + lambdas = evals[indices] + l01 = np.sort(np.mean(lambdas, axis=0)) + + response = np.array([l01[-1], l01[-2], l01[-2], S0]) + ratio = abs(response[1] / response[0]) + + if ratio > 0.25: + iflogger.warn(('Estimated response is not prolate enough. ' + 'Ratio=%0.3f.') % ratio) + elif ratio < 1.e-5 or np.any(np.isnan(response)): + response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) + iflogger.warn( + ('Estimated response is not valid, using a default one')) + else: + iflogger.info(('Estimated response: %s') % str(response[:3])) np.savetxt(op.abspath(self.inputs.response), response) @@ -252,7 +269,6 @@ def _run_interface(self, runtime): nb.Nifti1Image( wm_mask.astype(np.uint8), affine, None).to_filename(op.abspath(self.inputs.out_mask)) - return runtime def _list_outputs(self): From 5c34854971d9b39bae73504f95d9c9c12b9ada07 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sun, 31 Jan 2016 15:39:20 -0800 Subject: [PATCH 20/23] rebased dipy and adapted old interfaces --- nipype/interfaces/dipy/base.py | 40 ++++- nipype/interfaces/dipy/preprocess.py | 28 +--- nipype/interfaces/dipy/reconstruction.py | 55 +++---- nipype/interfaces/dipy/simulate.py | 30 ++-- nipype/interfaces/dipy/tensors.py | 147 +++++------------- nipype/interfaces/dipy/tests/test_auto_CSD.py | 15 +- nipype/interfaces/dipy/tests/test_auto_DTI.py | 12 +- .../dipy/tests/test_auto_Denoise.py | 1 + .../dipy/tests/test_auto_DipyBaseInterface.py | 20 +-- .../tests/test_auto_DipyDiffusionInterface.py | 25 +++ .../tests/test_auto_EstimateResponseSH.py | 32 ++-- .../dipy/tests/test_auto_RESTORE.py | 15 +- .../tests/test_auto_StreamlineTractography.py | 15 +- .../dipy/tests/test_auto_TensorMode.py | 12 +- .../dipy/tests/test_auto_TrackDensityMap.py | 3 + nipype/interfaces/dipy/tracks.py | 20 +-- 16 files changed, 220 insertions(+), 250 deletions(-) create mode 100644 nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 0d29b3f371..d760172710 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,8 +1,44 @@ # -*- coding: utf-8 -*- -import numpy as np +""" Base interfaces for dipy """ import os.path as op +import numpy as np from nipype.interfaces.base import (traits, File, isdefined, BaseInterface, 
BaseInterfaceInputSpec) +from ... import logging + +IFLOGGER = logging.getLogger('interface') + +HAVE_DIPY = True +try: + import dipy +except ImportError: + HAVE_DIPY = False + + +def no_dipy(): + """ Check if dipy is available """ + global HAVE_DIPY + return not HAVE_DIPY + + +def dipy_version(): + """ Check dipy version """ + if no_dipy(): + return None + + return dipy.__version__ + + +class DipyBaseInterface(BaseInterface): + + """ + A base interface for py:mod:`dipy` computations + """ + def __init__(self, **inputs): + if no_dipy(): + IFLOGGER.error('dipy was not found') + # raise ImportError('dipy was not found') + super(DipyBaseInterface, self).__init__(**inputs) class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): @@ -13,7 +49,7 @@ class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): out_prefix = traits.Str(desc=('output prefix for file names')) -class DipyBaseInterface(BaseInterface): +class DipyDiffusionInterface(DipyBaseInterface): """ A base interface for py:mod:`dipy` computations diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 5ffa2178a0..198765a244 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -8,25 +8,13 @@ >>> os.chdir(datadir) """ import os.path as op -import warnings - import nibabel as nb import numpy as np -from ..base import (traits, TraitedSpec, BaseInterface, File, isdefined) -from ...utils.filemanip import split_filename -from ...utils.misc import package_check +from ..base import (traits, TraitedSpec, File, isdefined) +from .base import DipyBaseInterface from ... import logging -iflogger = logging.getLogger('interface') - -have_dipy = True -try: - package_check('dipy', version='0.6.0') -except Exception as e: - have_dipy = False -else: - from dipy.align.aniso2iso import resample - from dipy.core.gradients import GradientTable +IFLOGGER = logging.getLogger('interface') class ResampleInputSpec(TraitedSpec): @@ -46,7 +34,7 @@ class ResampleOutputSpec(TraitedSpec): out_file = File(exists=True) -class Resample(BaseInterface): +class Resample(DipyBaseInterface): """ An interface to reslicing diffusion datasets. @@ -75,7 +63,7 @@ def _run_interface(self, runtime): resample_proxy(self.inputs.in_file, order=order, new_zooms=vox_size, out_file=out_file) - iflogger.info('Resliced image saved as {i}'.format(i=out_file)) + IFLOGGER.info('Resliced image saved as {i}'.format(i=out_file)) return runtime def _list_outputs(self): @@ -111,7 +99,7 @@ class DenoiseOutputSpec(TraitedSpec): out_file = File(exists=True) -class Denoise(BaseInterface): +class Denoise(DipyBaseInterface): """ An interface to denoising diffusion datasets [Coupe2008]_. @@ -166,7 +154,7 @@ def _run_interface(self, runtime): smask=signal_mask, nmask=noise_mask, out_file=out_file) - iflogger.info(('Denoised image saved as {i}, estimated ' + IFLOGGER.info(('Denoised image saved as {i}, estimated ' 'SNR={s}').format(i=out_file, s=str(s))) return runtime @@ -187,6 +175,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): """ Performs regridding of an image to set isotropic voxel sizes using dipy. 
""" + from dipy.align.aniso2iso import resample if out_file is None: fname, fext = op.splitext(op.basename(in_file)) @@ -227,7 +216,6 @@ def nlmeans_proxy(in_file, settings, """ Uses non-local means to denoise 4D datasets """ - package_check('dipy', version='0.8.0.dev') from dipy.denoise.nlmeans import nlmeans from scipy.ndimage.morphology import binary_erosion from scipy import ndimage diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 4ff351a59e..6a01eacbf0 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -1,23 +1,18 @@ # -*- coding: utf-8 -*- -import os +""" +Interfaces to the reconstruction algorithms in dipy + +""" import os.path as op import numpy as np -from dipy.core.gradients import GradientTable import nibabel as nb -from nipype.interfaces.base import (TraitedSpec, File, InputMultiPath, - OutputMultiPath, Undefined, traits, - isdefined, OutputMultiPath, - CommandLineInputSpec, CommandLine, - BaseInterface, BaseInterfaceInputSpec, - traits) -from nipype.utils.filemanip import split_filename, fname_presuffix - -from .base import DipyBaseInterface, DipyBaseInterfaceInputSpec +from nipype.interfaces.base import TraitedSpec, File, traits, isdefined +from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec from nipype import logging -iflogger = logging.getLogger('interface') +IFLOGGER = logging.getLogger('interface') class RESTOREInputSpec(DipyBaseInterfaceInputSpec): @@ -27,12 +22,12 @@ class RESTOREInputSpec(DipyBaseInterfaceInputSpec): class RESTOREOutputSpec(TraitedSpec): - fa = File(desc=('output fractional anisotropy (FA) map computed from ' - 'the fitted DTI')) - md = File(desc=('output mean diffusivity (MD) map computed from the ' - 'fitted DTI')) - rd = File(desc=('output radial diffusivity (RD) map computed from ' - 'the fitted DTI')) + fa = File(desc='output fractional anisotropy (FA) map computed from ' + 'the fitted DTI') + md = File(desc='output mean diffusivity (MD) map computed from the ' + 'fitted DTI') + rd = File(desc='output radial diffusivity (RD) map computed from ' + 'the fitted DTI') mode = File(desc=('output mode (MO) map computed from the fitted DTI')) trace = File(desc=('output the tensor trace map computed from the ' 'fitted DTI')) @@ -40,7 +35,7 @@ class RESTOREOutputSpec(TraitedSpec): evecs = File(desc=('output the eigenvectors of the fitted DTI')) -class RESTORE(DipyBaseInterface): +class RESTORE(DipyDiffusionInterface): """ Uses RESTORE [Chang2005]_ to perform DTI fitting with outlier detection. @@ -92,7 +87,7 @@ def _run_interface(self, runtime): noise_msk = noise_msk.astype(np.uint8) try_b0 = False elif np.all(data[msk == 0, 0] == 0): - iflogger.info('Input data are masked.') + IFLOGGER.info('Input data are masked.') noise_msk = msk.reshape(-1).astype(np.uint8) else: noise_msk = (1 - msk).reshape(-1).astype(np.uint8) @@ -123,18 +118,18 @@ def _run_interface(self, runtime): sigma = mean_std * (1 + bias) if sigma == 0: - iflogger.warn( + IFLOGGER.warn( ('Noise std is 0.0, looks like data was masked and noise' ' cannot be estimated correctly. 
Using default tensor ' 'model instead of RESTORE.')) dti = TensorModel(gtab) else: - iflogger.info(('Performing RESTORE with noise std=%.4f.') % sigma) + IFLOGGER.info(('Performing RESTORE with noise std=%.4f.') % sigma) dti = TensorModel(gtab, fit_method='RESTORE', sigma=sigma) try: fit_restore = dti.fit(data, msk) - except TypeError as e: + except TypeError: dti = TensorModel(gtab) fit_restore = dti.fit(data, msk) @@ -181,7 +176,7 @@ class EstimateResponseSHOutputSpec(TraitedSpec): out_mask = File(exists=True, desc=('output wm mask')) -class EstimateResponseSH(DipyBaseInterface): +class EstimateResponseSH(DipyDiffusionInterface): """ Uses dipy to compute the single fiber response to be used in spherical @@ -253,14 +248,14 @@ def _run_interface(self, runtime): ratio = abs(response[1] / response[0]) if ratio > 0.25: - iflogger.warn(('Estimated response is not prolate enough. ' + IFLOGGER.warn(('Estimated response is not prolate enough. ' 'Ratio=%0.3f.') % ratio) elif ratio < 1.e-5 or np.any(np.isnan(response)): response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) - iflogger.warn( + IFLOGGER.warn( ('Estimated response is not valid, using a default one')) else: - iflogger.info(('Estimated response: %s') % str(response[:3])) + IFLOGGER.info(('Estimated response: %s') % str(response[:3])) np.savetxt(op.abspath(self.inputs.response), response) @@ -293,7 +288,7 @@ class CSDOutputSpec(TraitedSpec): out_fods = File(desc=('fODFs output file name')) -class CSD(DipyBaseInterface): +class CSD(DipyDiffusionInterface): """ Uses CSD [Tournier2007]_ to generate the fODF of DWIs. The interface uses @@ -344,13 +339,13 @@ def _run_interface(self, runtime): ratio = response[0][1] / response[0][0] if abs(ratio - 0.2) > 0.1: - iflogger.warn(('Estimated response is not prolate enough. ' + IFLOGGER.warn(('Estimated response is not prolate enough. ' 'Ratio=%0.3f.') % ratio) csd_model = ConstrainedSphericalDeconvModel( gtab, response, sh_order=self.inputs.sh_order) - iflogger.info('Fitting CSD model') + IFLOGGER.info('Fitting CSD model') csd_fit = csd_model.fit(data, msk) f = gzip.open(self._gen_filename('csdmodel', ext='.pklz'), 'wb') diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py index 939d7d15a4..f4eb174a22 100644 --- a/nipype/interfaces/dipy/simulate.py +++ b/nipype/interfaces/dipy/simulate.py @@ -7,29 +7,18 @@ """ from __future__ import division +from multiprocessing import (Pool, cpu_count) +import os.path as op from builtins import range -import os.path as op -from multiprocessing import (Pool, cpu_count) import nibabel as nb -from ..base import (traits, TraitedSpec, BaseInterface, BaseInterfaceInputSpec, +from ..base import (traits, TraitedSpec, BaseInterfaceInputSpec, File, InputMultiPath, isdefined) -from ...utils.misc import package_check +from .base import DipyBaseInterface from ... 
import logging -iflogger = logging.getLogger('interface') - -have_dipy = True -try: - package_check('dipy', version='0.8.0') -except Exception as e: - have_dipy = False -else: - import numpy as np - from dipy.sims.voxel import (multi_tensor, add_noise, - all_tensor_evecs) - from dipy.core.gradients import gradient_table +IFLOGGER = logging.getLogger('interface') class SimulateMultiTensorInputSpec(BaseInterfaceInputSpec): @@ -77,7 +66,7 @@ class SimulateMultiTensorOutputSpec(TraitedSpec): out_bval = File(exists=True, desc='simulated b values') -class SimulateMultiTensor(BaseInterface): +class SimulateMultiTensor(DipyBaseInterface): """ Interface to MultiTensor model simulator in dipy @@ -101,6 +90,8 @@ class SimulateMultiTensor(BaseInterface): output_spec = SimulateMultiTensorOutputSpec def _run_interface(self, runtime): + from dipy.core.gradients import gradient_table + # Gradient table if isdefined(self.inputs.in_bval) and isdefined(self.inputs.in_bvec): # Load the gradient strengths and directions @@ -237,7 +228,7 @@ def _run_interface(self, runtime): pool = Pool(processes=n_proc) # Simulate sticks using dipy - iflogger.info(('Starting simulation of %d voxels, %d diffusion' + IFLOGGER.info(('Starting simulation of %d voxels, %d diffusion' ' directions.') % (len(args), ndirs)) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: @@ -280,6 +271,7 @@ def _compute_voxel(args): .. [Pierpaoli1996] Pierpaoli et al., Diffusion tensor MR imaging of the human brain, Radiology 201:637-648. 1996. """ + from dipy.sims.voxel import multi_tensor ffs = args['fractions'] gtab = args['gradients'] @@ -297,7 +289,7 @@ def _compute_voxel(args): angles=args['sticks'], fractions=ffs, snr=snr) except Exception as e: pass - # iflogger.warn('Exception simulating dwi signal: %s' % e) + # IFLOGGER.warn('Exception simulating dwi signal: %s' % e) return signal.tolist() diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 7eaa0180a6..c7d02c681c 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -5,81 +5,25 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -import os.path as op - import nibabel as nb -import numpy as np - -from ..base import (TraitedSpec, BaseInterface, File) -from ...utils.filemanip import split_filename -from ...utils.misc import package_check -from ... import logging -iflogger = logging.getLogger('interface') - -have_dipy = True -try: - package_check('dipy', version='0.6.0') -except Exception as e: - have_dipy = False -else: - from dipy.reconst import dti - from dipy.core.gradients import gradient_table - from dipy.io.utils import nifti1_symmat - - -def tensor_fitting(in_file, bvals, bvecs, mask_file=None): - """ - Use dipy to fit DTI - - Parameters - ---------- - in_file : str - Full path to a DWI data file. - bvals : str - Full path to a file containing gradient magnitude information (b-values). - bvecs : str - Full path to a file containing gradient direction information (b-vectors). - mask_file : str, optional - Full path to a file containing a binary mask. Defaults to use the entire volume. 
- - Returns - ------- - TensorFit object, affine - """ - img = nb.load(in_file) - data = img.get_data() - affine = img.affine - if mask_file is not None: - mask = nb.load(mask_file).get_data() - else: - mask = None - # Load information about the gradients: - gtab = gradient_table(bvals, bvecs) +from ..base import TraitedSpec, File, isdefined +from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec - # Fit it - tenmodel = dti.TensorModel(gtab) - return tenmodel.fit(data, mask), affine +from ... import logging +IFLOGGER = logging.getLogger('interface') -class DTIInputSpec(TraitedSpec): - in_file = File(exists=True, mandatory=True, - desc='The input 4D diffusion-weighted image file') - bvecs = File(exists=True, mandatory=True, - desc='The input b-vector text file') - bvals = File(exists=True, mandatory=True, - desc='The input b-value text file') +class DTIInputSpec(DipyBaseInterfaceInputSpec): mask_file = File(exists=True, desc='An optional white matter mask') - out_filename = File( - genfile=True, desc='The output filename for the DTI parameters image') class DTIOutputSpec(TraitedSpec): out_file = File(exists=True) -class DTI(BaseInterface): +class DTI(DipyDiffusionInterface): """ Calculates the diffusion tensor model parameters @@ -89,59 +33,51 @@ class DTI(BaseInterface): >>> import nipype.interfaces.dipy as dipy >>> dti = dipy.DTI() >>> dti.inputs.in_file = 'diffusion.nii' - >>> dti.inputs.bvecs = 'bvecs' - >>> dti.inputs.bvals = 'bvals' + >>> dti.inputs.in_bvec = 'bvecs' + >>> dti.inputs.in_bval = 'bvals' >>> dti.run() # doctest: +SKIP """ input_spec = DTIInputSpec output_spec = DTIOutputSpec def _run_interface(self, runtime): - ten_fit, affine = tensor_fitting(self.inputs.in_file, - self.inputs.bvals, - self.inputs.bvecs, - self.inputs.mask_file) + from dipy.reconst import dti + from dipy.io.utils import nifti1_symmat + gtab = self._get_gradient_table() + + img = nb.load(self.inputs.in_file) + data = img.get_data() + affine = img.affine + mask = None + if isdefined(self.inputs.mask_file): + mask = nb.load(self.inputs.mask_file).get_data() + + # Fit it + tenmodel = dti.TensorModel(gtab) + ten_fit = tenmodel.fit(data, mask) lower_triangular = tenfit.lower_triangular() img = nifti1_symmat(lower_triangular, affine) - out_file = op.abspath(self._gen_outfilename()) + out_file = self._gen_filename('dti') nb.save(img, out_file) - iflogger.info('DTI parameters image saved as {i}'.format(i=out_file)) + IFLOGGER.info('DTI parameters image saved as {i}'.format(i=out_file)) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs['out_file'] = self._gen_filename('dti') return outputs - def _gen_filename(self, name): - if name is 'out_filename': - return self._gen_outfilename() - else: - return None - - def _gen_outfilename(self): - _, name, _ = split_filename(self.inputs.in_file) - return name + '_dti.nii' - -class TensorModeInputSpec(TraitedSpec): - in_file = File(exists=True, mandatory=True, - desc='The input 4D diffusion-weighted image file') - bvecs = File(exists=True, mandatory=True, - desc='The input b-vector text file') - bvals = File(exists=True, mandatory=True, - desc='The input b-value text file') +class TensorModeInputSpec(DipyBaseInterfaceInputSpec): mask_file = File(exists=True, desc='An optional white matter mask') - out_filename = File( - genfile=True, desc='The output filename for the Tensor mode image') class TensorModeOutputSpec(TraitedSpec): out_file = File(exists=True) -class 
TensorMode(BaseInterface): +class TensorMode(DipyDiffusionInterface): """ Creates a map of the mode of the diffusion tensors given a set of @@ -160,26 +96,23 @@ class TensorMode(BaseInterface): >>> import nipype.interfaces.dipy as dipy >>> mode = dipy.TensorMode() >>> mode.inputs.in_file = 'diffusion.nii' - >>> mode.inputs.bvecs = 'bvecs' - >>> mode.inputs.bvals = 'bvals' + >>> mode.inputs.in_bvec = 'bvecs' + >>> mode.inputs.in_bval = 'bvals' >>> mode.run() # doctest: +SKIP """ input_spec = TensorModeInputSpec output_spec = TensorModeOutputSpec def _run_interface(self, runtime): + from dipy.reconst import dti + # Load the 4D image files img = nb.load(self.inputs.in_file) data = img.get_data() affine = img.get_affine() # Load the gradient strengths and directions - bvals = np.loadtxt(self.inputs.bvals) - gradients = np.loadtxt(self.inputs.bvecs).T - - # Place in Dipy's preferred format - gtab = GradientTable(gradients) - gtab.bvals = bvals + gtab = self._get_gradient_table() # Mask the data so that tensors are not fit for # unnecessary voxels @@ -194,22 +127,12 @@ def _run_interface(self, runtime): # Write as a 3D Nifti image with the original affine img = nb.Nifti1Image(mode_data, affine) - out_file = op.abspath(self._gen_outfilename()) + out_file = self._gen_filename('mode') nb.save(img, out_file) - iflogger.info('Tensor mode image saved as {i}'.format(i=out_file)) + IFLOGGER.info('Tensor mode image saved as {i}'.format(i=out_file)) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs['out_file'] = self._gen_filename('mode') return outputs - - def _gen_filename(self, name): - if name is 'out_filename': - return self._gen_outfilename() - else: - return None - - def _gen_outfilename(self): - _, name, _ = split_filename(self.inputs.in_file) - return name + '_mode.nii' diff --git a/nipype/interfaces/dipy/tests/test_auto_CSD.py b/nipype/interfaces/dipy/tests/test_auto_CSD.py index f2339fed2a..d99aac4527 100644 --- a/nipype/interfaces/dipy/tests/test_auto_CSD.py +++ b/nipype/interfaces/dipy/tests/test_auto_CSD.py @@ -1,6 +1,7 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from nipype.testing import assert_equal -from nipype.interfaces.dipy.reconstruction import CSD +from ....testing import assert_equal +from ..reconstruction import CSD + def test_CSD_inputs(): input_map = dict(b0_thres=dict(usedefault=True, @@ -27,17 +28,17 @@ def test_CSD_inputs(): ) inputs = CSD.input_spec() - for key, metadata in input_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value + def test_CSD_outputs(): output_map = dict(model=dict(), out_fods=dict(), ) outputs = CSD.output_spec() - for key, metadata in output_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value - diff --git a/nipype/interfaces/dipy/tests/test_auto_DTI.py b/nipype/interfaces/dipy/tests/test_auto_DTI.py index 0abba84540..eb35d6a9e2 100644 --- a/nipype/interfaces/dipy/tests/test_auto_DTI.py +++ b/nipype/interfaces/dipy/tests/test_auto_DTI.py @@ -4,15 +4,19 @@ def test_DTI_inputs(): - input_map = dict(bvals=dict(mandatory=True, + input_map = dict(b0_thres=dict(usedefault=True, ), - bvecs=dict(mandatory=True, + 
ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, ), in_file=dict(mandatory=True, ), mask_file=dict(), - out_filename=dict(genfile=True, - ), + out_prefix=dict(), ) inputs = DTI.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_Denoise.py b/nipype/interfaces/dipy/tests/test_auto_Denoise.py index fa00b27abd..6a400231c4 100644 --- a/nipype/interfaces/dipy/tests/test_auto_Denoise.py +++ b/nipype/interfaces/dipy/tests/test_auto_Denoise.py @@ -14,6 +14,7 @@ def test_Denoise_inputs(): ), patch_radius=dict(), signal_mask=dict(), + snr=dict(), ) inputs = Denoise.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py index 4b0fed9022..ce3bd17584 100644 --- a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py +++ b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py @@ -1,24 +1,16 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from nipype.testing import assert_equal -from nipype.interfaces.dipy.base import DipyBaseInterface +from ....testing import assert_equal +from ..base import DipyBaseInterface + def test_DipyBaseInterface_inputs(): - input_map = dict(b0_thres=dict(usedefault=True, - ), - ignore_exception=dict(nohash=True, + input_map = dict(ignore_exception=dict(nohash=True, usedefault=True, ), - in_bval=dict(mandatory=True, - ), - in_bvec=dict(mandatory=True, - ), - in_file=dict(mandatory=True, - ), - out_prefix=dict(), ) inputs = DipyBaseInterface.input_spec() - for key, metadata in input_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py new file mode 100644 index 0000000000..e785433355 --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py @@ -0,0 +1,25 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ....testing import assert_equal +from ..base import DipyDiffusionInterface + + +def test_DipyDiffusionInterface_inputs(): + input_map = dict(b0_thres=dict(usedefault=True, + ), + ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, + ), + in_file=dict(mandatory=True, + ), + out_prefix=dict(), + ) + inputs = DipyDiffusionInterface.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py index 117fb15ee2..95e702bd50 100644 --- a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py +++ b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py @@ -1,9 +1,13 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from nipype.testing import assert_equal -from nipype.interfaces.dipy.reconstruction import EstimateResponseSH +from ....testing import assert_equal +from ..reconstruction import EstimateResponseSH + def test_EstimateResponseSH_inputs(): - input_map = dict(b0_thres=dict(usedefault=True, + input_map = dict(auto=dict(usedefault=True, + xor=['recursive'], + ), + b0_thres=dict(usedefault=True, 
), fa_thresh=dict(usedefault=True, ), @@ -19,24 +23,30 @@ def test_EstimateResponseSH_inputs(): in_file=dict(mandatory=True, ), in_mask=dict(), + out_mask=dict(usedefault=True, + ), out_prefix=dict(), - response=dict(), - save_glyph=dict(usedefault=True, + recursive=dict(usedefault=True, + xor=['auto'], + ), + response=dict(usedefault=True, + ), + roi_radius=dict(usedefault=True, ), ) inputs = EstimateResponseSH.input_spec() - for key, metadata in input_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value + def test_EstimateResponseSH_outputs(): - output_map = dict(glyph_file=dict(), + output_map = dict(out_mask=dict(), response=dict(), ) outputs = EstimateResponseSH.output_spec() - for key, metadata in output_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value - diff --git a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py index 7156bb26dc..c06bc74573 100644 --- a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py +++ b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py @@ -1,6 +1,7 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from nipype.testing import assert_equal -from nipype.interfaces.dipy.reconstruction import RESTORE +from ....testing import assert_equal +from ..reconstruction import RESTORE + def test_RESTORE_inputs(): input_map = dict(b0_thres=dict(usedefault=True, @@ -20,10 +21,11 @@ def test_RESTORE_inputs(): ) inputs = RESTORE.input_spec() - for key, metadata in input_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value + def test_RESTORE_outputs(): output_map = dict(evals=dict(), evecs=dict(), @@ -35,7 +37,6 @@ def test_RESTORE_outputs(): ) outputs = RESTORE.output_spec() - for key, metadata in output_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value - diff --git a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py index 0a910010c4..b4c4dae679 100644 --- a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py +++ b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py @@ -1,6 +1,7 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from nipype.testing import assert_equal -from nipype.interfaces.dipy.tracks import StreamlineTractography +from ....testing import assert_equal +from ..tracks import StreamlineTractography + def test_StreamlineTractography_inputs(): input_map = dict(gfa_thresh=dict(mandatory=True, @@ -35,10 +36,11 @@ def test_StreamlineTractography_inputs(): ) inputs = StreamlineTractography.input_spec() - for key, metadata in input_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value + def test_StreamlineTractography_outputs(): output_map = dict(gfa=dict(), odf_peaks=dict(), @@ 
-47,7 +49,6 @@ def test_StreamlineTractography_outputs(): ) outputs = StreamlineTractography.output_spec() - for key, metadata in output_map.items(): - for metakey, value in metadata.items(): + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value - diff --git a/nipype/interfaces/dipy/tests/test_auto_TensorMode.py b/nipype/interfaces/dipy/tests/test_auto_TensorMode.py index dc2f0a06ff..53f77d5d33 100644 --- a/nipype/interfaces/dipy/tests/test_auto_TensorMode.py +++ b/nipype/interfaces/dipy/tests/test_auto_TensorMode.py @@ -4,15 +4,19 @@ def test_TensorMode_inputs(): - input_map = dict(bvals=dict(mandatory=True, + input_map = dict(b0_thres=dict(usedefault=True, ), - bvecs=dict(mandatory=True, + ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_bval=dict(mandatory=True, + ), + in_bvec=dict(mandatory=True, ), in_file=dict(mandatory=True, ), mask_file=dict(), - out_filename=dict(genfile=True, - ), + out_prefix=dict(), ) inputs = TensorMode.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py b/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py index 5c2e2d0b8d..187c4c0d49 100644 --- a/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py +++ b/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py @@ -5,6 +5,9 @@ def test_TrackDensityMap_inputs(): input_map = dict(data_dims=dict(), + ignore_exception=dict(nohash=True, + usedefault=True, + ), in_file=dict(mandatory=True, ), out_filename=dict(usedefault=True, diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 10aa47cc54..1b0ad6f381 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -11,22 +11,14 @@ import nibabel as nb import nibabel.trackvis as nbt -from ..base import (TraitedSpec, BaseInterface, BaseInterfaceInputSpec, +from ..base import (TraitedSpec, BaseInterfaceInputSpec, File, isdefined, traits) -from ...utils.misc import package_check +from .base import DipyBaseInterface from ... import logging IFLOGGER = logging.getLogger('interface') -have_dipy = True -try: - package_check('dipy', version='0.6.0') -except Exception as e: - have_dipy = False -else: - from dipy.tracking.utils import density_map - -class TrackDensityMapInputSpec(TraitedSpec): +class TrackDensityMapInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc='The input TrackVis track file') reference = File(exists=True, @@ -46,7 +38,7 @@ class TrackDensityMapOutputSpec(TraitedSpec): out_file = File(exists=True) -class TrackDensityMap(BaseInterface): +class TrackDensityMap(DipyBaseInterface): """ Creates a tract density image from a TrackVis track file using functions @@ -66,6 +58,8 @@ class TrackDensityMap(BaseInterface): def _run_interface(self, runtime): from numpy import min_scalar_type + from dipy.tracking.utils import density_map + tracks, header = nbt.read(self.inputs.in_file) streams = ((ii[0]) for ii in tracks) @@ -145,7 +139,7 @@ class StreamlineTractographyOutputSpec(TraitedSpec): ' in seeding.')) -class StreamlineTractography(BaseInterface): +class StreamlineTractography(DipyBaseInterface): """ Streamline tractography using EuDX [Garyfallidis12]_. 
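The rebase in PATCH 20/23 routes DTI and TensorMode through the new DipyDiffusionInterface, so the gradient table and output naming now come from the shared base class. For reference, here is a minimal standalone sketch of the tensor fit that the reworked DTI interface wraps; 'diffusion.nii', 'bvals' and 'bvecs' mirror the doctest in the hunk above, while the mask and output file names are placeholders, and a dipy release of 0.8 or newer is assumed.

    # Sketch only -- the plain dipy calls behind the reworked DTI interface.
    # The mask and output file names are placeholders for illustration.
    import nibabel as nb
    import numpy as np
    from dipy.core.gradients import gradient_table
    from dipy.io.utils import nifti1_symmat
    from dipy.reconst import dti

    img = nb.load('diffusion.nii')                    # 4D DWI dataset
    data, affine = img.get_data(), img.affine
    gtab = gradient_table(np.loadtxt('bvals'), np.loadtxt('bvecs').T)
    mask = nb.load('mask.nii').get_data() > 0         # optional WM mask

    ten_fit = dti.TensorModel(gtab).fit(data, mask)   # fit the tensor model
    # Store the six unique tensor coefficients as a symmetric-matrix NIfTI,
    # which is what the interface writes out as its '_dti' image.
    nifti1_symmat(ten_fit.lower_triangular(), affine).to_filename('dti.nii.gz')

Note that the fitted object has to be referenced with a single, consistent name (ten_fit here), since lower_triangular() is a method of the TensorFit returned by fit().
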
From e8c034738508bc0ec445cfd0c9bbc7a1e9142cb2 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 1 Feb 2016 07:41:44 -0800 Subject: [PATCH 21/23] added dipy to CircleCI --- circle.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/circle.yml b/circle.yml index cbbca57f41..8b8b6285f2 100644 --- a/circle.yml +++ b/circle.yml @@ -21,7 +21,7 @@ dependencies: # Set up python environment - pip install --upgrade pip - pip install -e . - - pip install matplotlib sphinx ipython boto + - pip install matplotlib sphinx ipython boto dipy - gem install fakes3 - if [[ ! -d ~/examples/data ]]; then wget "http://tcpdiag.dl.sourceforge.net/project/nipy/nipype/nipype-0.2/nipype-tutorial.tar.bz2" && tar jxvf nipype-tutorial.tar.bz2 && mv nipype-tutorial/* ~/examples/; fi - if [[ ! -d ~/examples/fsl_course_data ]]; then wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt1.tar.gz" && wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt2.tar.gz" && wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/tbss.tar.gz" && mkdir ~/examples/fsl_course_data && tar zxvf fdt1.tar.gz -C ~/examples/fsl_course_data && tar zxvf fdt2.tar.gz -C ~/examples/fsl_course_data && tar zxvf tbss.tar.gz -C ~/examples/fsl_course_data; fi From 10476d02ffb7bfdbcb26a95f06dfb5a837c58d03 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 1 Feb 2016 16:13:02 -0800 Subject: [PATCH 22/23] added fix for https://github.com/nipy/nipype/pull/1335#issuecomment-176277315 --- nipype/interfaces/dipy/preprocess.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 198765a244..143f239e6c 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -175,7 +175,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): """ Performs regridding of an image to set isotropic voxel sizes using dipy. 
""" - from dipy.align.aniso2iso import resample + from dipy.align.reslice import reslice if out_file is None: fname, fext = op.splitext(op.basename(in_file)) @@ -197,7 +197,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): if np.all(im_zooms == new_zooms): return in_file - data2, affine2 = resample(data, affine, im_zooms, new_zooms, order=order) + data2, affine2 = reslice(data, affine, im_zooms, new_zooms, order=order) tmp_zooms = np.array(hdr.get_zooms()) tmp_zooms[:3] = new_zooms[0] hdr.set_zooms(tuple(tmp_zooms)) From 0a1f066cac5ba2c249bd8f47115c54894423caa6 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 4 Feb 2016 10:36:18 -0800 Subject: [PATCH 23/23] update auto test file created by outdated installation --- .../interfaces/tests/test_auto_S3DataSink.py | 44 ------------------- 1 file changed, 44 deletions(-) delete mode 100644 nipype/interfaces/tests/test_auto_S3DataSink.py diff --git a/nipype/interfaces/tests/test_auto_S3DataSink.py b/nipype/interfaces/tests/test_auto_S3DataSink.py deleted file mode 100644 index 9ef342defb..0000000000 --- a/nipype/interfaces/tests/test_auto_S3DataSink.py +++ /dev/null @@ -1,44 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from ...testing import assert_equal -from ..io import S3DataSink - - -def test_S3DataSink_inputs(): - input_map = dict(_outputs=dict(usedefault=True, - ), - anon=dict(usedefault=True, - ), - base_directory=dict(), - bucket=dict(mandatory=True, - ), - bucket_path=dict(usedefault=True, - ), - container=dict(), - ignore_exception=dict(nohash=True, - usedefault=True, - ), - parameterization=dict(usedefault=True, - ), - regexp_substitutions=dict(), - remove_dest_dir=dict(usedefault=True, - ), - strip_dir=dict(), - substitutions=dict(), - testing=dict(usedefault=True, - ), - ) - inputs = S3DataSink.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - yield assert_equal, getattr(inputs.traits()[key], metakey), value - - -def test_S3DataSink_outputs(): - output_map = dict(out_file=dict(), - ) - outputs = S3DataSink.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - yield assert_equal, getattr(outputs.traits()[key], metakey), value