diff --git a/circle.yml b/circle.yml
index 4f0d31d060..cbbca57f41 100644
--- a/circle.yml
+++ b/circle.yml
@@ -23,8 +23,8 @@ dependencies:
     - pip install -e .
    - pip install matplotlib sphinx ipython boto
    - gem install fakes3
-    - if [[ ! -d ~/examples/data ]]; then wget "http://tcpdiag.dl.sourceforge.net/project/nipy/nipype/nipype-0.2/nipype-tutorial.tar.bz2"; tar jxvf nipype-tutorial.tar.bz2; mkdir ~/examples; mv nipype-tutorial/* ~/examples/; fi
-    - if [[ ! -d ~/examples/fsl_course_data ]]; then wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt1.tar.gz" ; wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt2.tar.gz"; wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/tbss.tar.gz"; mkdir ~/examples/fsl_course_data; tar zxvf fdt1.tar.gz -C ~/examples/fsl_course_data; tar zxvf fdt2.tar.gz -C ~/examples/fsl_course_data; tar zxvf tbss.tar.gz -C ~/examples/fsl_course_data; fi
+    - if [[ ! -d ~/examples/data ]]; then wget "http://tcpdiag.dl.sourceforge.net/project/nipy/nipype/nipype-0.2/nipype-tutorial.tar.bz2" && tar jxvf nipype-tutorial.tar.bz2 && mv nipype-tutorial/* ~/examples/; fi
+    - if [[ ! -d ~/examples/fsl_course_data ]]; then wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt1.tar.gz" && wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt2.tar.gz" && wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/tbss.tar.gz" && mkdir ~/examples/fsl_course_data && tar zxvf fdt1.tar.gz -C ~/examples/fsl_course_data && tar zxvf fdt2.tar.gz -C ~/examples/fsl_course_data && tar zxvf tbss.tar.gz -C ~/examples/fsl_course_data; fi
     - bash ~/nipype/tools/install_spm_mcr.sh
    - mkdir -p ~/.nipype && echo '[logging]' > ~/.nipype/nipype.cfg && echo 'workflow_level = DEBUG' >> ~/.nipype/nipype.cfg && echo 'interface_level = DEBUG' >> ~/.nipype/nipype.cfg && echo 'filemanip_level = DEBUG' >> ~/.nipype/nipype.cfg
 machine:
diff --git a/nipype/algorithms/tests/test_auto_ErrorMap.py b/nipype/algorithms/tests/test_auto_ErrorMap.py
deleted file mode 100644
index 69484529dd..0000000000
--- a/nipype/algorithms/tests/test_auto_ErrorMap.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from ...testing import assert_equal
-from ..metrics import ErrorMap
-
-
-def test_ErrorMap_inputs():
-    input_map = dict(ignore_exception=dict(nohash=True,
-    usedefault=True,
-    ),
-    in_ref=dict(mandatory=True,
-    ),
-    in_tst=dict(mandatory=True,
-    ),
-    mask=dict(),
-    metric=dict(mandatory=True,
-    usedefault=True,
-    ),
-    out_map=dict(),
-    )
-    inputs = ErrorMap.input_spec()
-
-    for key, metadata in list(input_map.items()):
-        for metakey, value in list(metadata.items()):
-            yield assert_equal, getattr(inputs.traits()[key], metakey), value
-
-
-def test_ErrorMap_outputs():
-    output_map = dict(distance=dict(),
-    out_map=dict(),
-    )
-    outputs = ErrorMap.output_spec()
-
-    for key, metadata in list(output_map.items()):
-        for metakey, value in list(metadata.items()):
-            yield assert_equal, getattr(outputs.traits()[key], metakey), value
diff --git a/nipype/algorithms/tests/test_auto_Overlap.py b/nipype/algorithms/tests/test_auto_Overlap.py
deleted file mode 100644
index a5a3874bd1..0000000000
--- a/nipype/algorithms/tests/test_auto_Overlap.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from ...testing import assert_equal
-from ..misc import Overlap
-
-
-def test_Overlap_inputs():
-    input_map = dict(bg_overlap=dict(mandatory=True,
-    usedefault=True,
-    ),
-    ignore_exception=dict(nohash=True,
-    usedefault=True,
-    ),
-    mask_volume=dict(),
-    out_file=dict(usedefault=True,
-    ),
-    vol_units=dict(mandatory=True,
-    usedefault=True,
-    ),
-    volume1=dict(mandatory=True,
-    ),
-    volume2=dict(mandatory=True,
-    ),
-    weighting=dict(usedefault=True,
-    ),
-    )
-    inputs = Overlap.input_spec()
-
-    for key, metadata in list(input_map.items()):
-        for metakey, value in list(metadata.items()):
-            yield assert_equal, getattr(inputs.traits()[key], metakey), value
-
-
-def test_Overlap_outputs():
-    output_map = dict(dice=dict(),
-    diff_file=dict(),
-    jaccard=dict(),
-    labels=dict(),
-    roi_di=dict(),
-    roi_ji=dict(),
-    roi_voldiff=dict(),
-    volume_difference=dict(),
-    )
-    outputs = Overlap.output_spec()
-
-    for key, metadata in list(output_map.items()):
-        for metakey, value in list(metadata.items()):
-            yield assert_equal, getattr(outputs.traits()[key], metakey), value
diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py
index 22870d5d19..0f953c82e4 100644
--- a/nipype/interfaces/afni/base.py
+++ b/nipype/interfaces/afni/base.py
@@ -6,13 +6,14 @@
 
 import os
-import warnings
+from ... import logging
 from ...utils.filemanip import split_filename
 from ..base import (
     CommandLine, traits, CommandLineInputSpec, isdefined, File, TraitedSpec)
 
-warn = warnings.warn
+# Use nipype's logging system
+iflogger = logging.getLogger('interface')
 
 
 class Info(object):
@@ -40,26 +41,29 @@ def version():
         try:
             clout = CommandLine(command='afni_vcheck',
                                 terminal_output='allatonce').run()
+
+            # Try to parse the version number
+            currv = clout.runtime.stdout.split('\n')[1].split('=', 1)[1].strip()
         except IOError:
             # If afni_vcheck is not present, return None
-            warn('afni_vcheck executable not found.')
+            iflogger.warn('afni_vcheck executable not found.')
             return None
         except RuntimeError as e:
-            # If AFNI is outdated, afni_vcheck throws error
-            warn('AFNI is outdated')
-            return str(e).split('\n')[4].split('=', 1)[1].strip()
-
-        # Try to parse the version number
-        out = clout.runtime.stdout.split('\n')[1].split('=', 1)[1].strip()
+            # If AFNI is outdated, afni_vcheck throws an error.
+            # Show the new version, but parse the current one anyway.
+            currv = str(e).split('\n')[4].split('=', 1)[1].strip()
+            nextv = str(e).split('\n')[6].split('=', 1)[1].strip()
+            iflogger.warn(
+                'AFNI is outdated, detected version %s and %s is available.'
+                % (currv, nextv))
 
-        if out.startswith('AFNI_'):
-            out = out[5:]
+        if currv.startswith('AFNI_'):
+            currv = currv[5:]
 
-        v = out.split('.')
+        v = currv.split('.')
         try:
             v = [int(n) for n in v]
         except ValueError:
-            return out
+            return currv
         return tuple(v)
 
     @classmethod
diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py
index 058a99a1af..85f2a4eaf9 100644
--- a/nipype/interfaces/afni/preprocess.py
+++ b/nipype/interfaces/afni/preprocess.py
@@ -8,10 +8,10 @@
     >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
     >>> os.chdir(datadir)
 """
-import warnings
 import os
 import re
 
+from warnings import warn
 from .base import AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec
 from ..base import CommandLineInputSpec, CommandLine, OutputMultiPath
@@ -20,8 +20,6 @@
 from ...utils.filemanip import (load_json, save_json, split_filename)
 from ...utils.filemanip import fname_presuffix
 
-warn = warnings.warn
-
 
 class To3DInputSpec(AFNICommandInputSpec):
     out_file = File(name_template="%s", desc='output image file name',
diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py
index 885a97a93a..5ecc49b957 100644
--- a/nipype/interfaces/dipy/preprocess.py
+++ b/nipype/interfaces/dipy/preprocess.py
@@ -23,9 +23,6 @@
     package_check('dipy', version='0.6.0')
 except Exception as e:
     have_dipy = False
-else:
-    from dipy.align.aniso2iso import resample
-    from dipy.core.gradients import GradientTable
 
 
 class ResampleInputSpec(TraitedSpec):
@@ -172,6 +169,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None):
     """
     Performs regridding of an image to set isotropic voxel sizes using dipy.
     """
+    from dipy.align.aniso2iso import resample
 
     if out_file is None:
         fname, fext = op.splitext(op.basename(in_file))
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py
index 42689a458b..7da293e7bd 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py
@@ -16,6 +16,7 @@ def test_Surface2VolTransform_inputs():
     usedefault=True,
     ),
     mkmask=dict(argstr='--mkmask',
+    xor=['source_file'],
     ),
     projfrac=dict(argstr='--projfrac %s',
     ),
@@ -26,6 +27,7 @@ def test_Surface2VolTransform_inputs():
     source_file=dict(argstr='--surfval %s',
     copyfile=False,
     mandatory=True,
+    xor=['mkmask'],
     ),
     subject_id=dict(argstr='--identity %s',
     xor=['reg_file'],
diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py
index 0213dc30d4..5697130131 100644
--- a/nipype/interfaces/freesurfer/utils.py
+++ b/nipype/interfaces/freesurfer/utils.py
@@ -390,7 +390,7 @@ def _gen_filename(self, name):
 
 class Surface2VolTransformInputSpec(FSTraitedSpec):
     source_file = File(exists=True, argstr='--surfval %s',
-                       copyfile=False, mandatory=True,
+                       copyfile=False, mandatory=True, xor=['mkmask'],
                        desc='This is the source of the surface values')
     hemi = traits.Str(argstr='--hemi %s', mandatory=True,
                       desc='hemisphere of data')
@@ -404,7 +404,7 @@ class Surface2VolTransformInputSpec(FSTraitedSpec):
     template_file = File(exists=True, argstr='--template %s',
                          desc='Output template volume')
     mkmask = traits.Bool(desc='make a mask instead of loading surface values',
-                         argstr='--mkmask')
+                         argstr='--mkmask', xor=['source_file'])
     vertexvol_file = File(name_template="%s_asVol_vertex.nii",
                           desc=('Path name of the vertex output volume, which '
                                'is the same as output volume except that the '
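The two hunks above make source_file and mkmask mutually exclusive via xor. A rough sketch of the effect, not part of the diff; the file name is a placeholder (exists=True is enforced on assignment) and the exact exception nipype raises for mutually exclusive inputs may vary:

    from nipype.interfaces.freesurfer.utils import Surface2VolTransform

    xfm = Surface2VolTransform()
    xfm.inputs.source_file = 'lh.cope1.mgz'   # placeholder surface value file, assumed to exist
    try:
        xfm.inputs.mkmask = True              # conflicts with source_file now that xor is set
    except Exception as err:
        print(err)                            # expected: a mutually-exclusive-input error
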
diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py
new file mode 100644
index 0000000000..00dbbfb3ea
--- /dev/null
+++ b/nipype/interfaces/petpvc.py
@@ -0,0 +1,224 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+"""
+    Change directory to provide relative paths for doctests
+    >>> import os
+    >>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
+    >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
+    >>> os.chdir(datadir)
+
+Nipype interface for PETPVC.
+
+PETPVC is software from the Nuclear Medicine Department
+of the UCL University Hospital, London, UK.
+
+Its source code is here: https://github.com/UCL/PETPVC
+
+The methods it implements are explained here:
+K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. Hutton,
+"A review of partial volume correction techniques for emission tomography
+and their applications in neurology, cardiology and oncology," Phys. Med.
+Biol., vol. 57, no. 21, p. R119, 2012.
+
+A publication describing this software tool is currently under review.
+
+
+Its command line help shows this:
+
+   -i --input < filename >
+      = PET image file
+   -o --output < filename >
+      = Output file
+   [ -m --mask < filename > ]
+      = Mask image file
+   -p --pvc < keyword >
+      = Desired PVC method
+   -x < X >
+      = The full-width at half maximum in mm along x-axis
+   -y < Y >
+      = The full-width at half maximum in mm along y-axis
+   -z < Z >
+      = The full-width at half maximum in mm along z-axis
+   [ -d --debug ]
+      = Prints debug information
+   [ -n --iter [ Val ] ]
+      = Number of iterations
+        With: Val (Default = 10)
+   [ -k [ Val ] ]
+      = Number of deconvolution iterations
+        With: Val (Default = 10)
+   [ -a --alpha [ aval ] ]
+      = Alpha value
+        With: aval (Default = 1.5)
+   [ -s --stop [ stopval ] ]
+      = Stopping criterion
+        With: stopval (Default = 0.01)
+
+----------------------------------------------
+Technique - keyword
+
+Geometric transfer matrix - "GTM"
+Labbe approach - "LABBE"
+Richardson-Lucy - "RL"
+Van-Cittert - "VC"
+Region-based voxel-wise correction - "RBV"
+RBV with Labbe - "LABBE+RBV"
+RBV with Van-Cittert - "RBV+VC"
+RBV with Richardson-Lucy - "RBV+RL"
+RBV with Labbe and Van-Cittert - "LABBE+RBV+VC"
+RBV with Labbe and Richardson-Lucy - "LABBE+RBV+RL"
+Multi-target correction - "MTC"
+MTC with Labbe - "LABBE+MTC"
+MTC with Van-Cittert - "MTC+VC"
+MTC with Richardson-Lucy - "MTC+RL"
+MTC with Labbe and Van-Cittert - "LABBE+MTC+VC"
+MTC with Labbe and Richardson-Lucy - "LABBE+MTC+RL"
+Iterative Yang - "IY"
+Iterative Yang with Van-Cittert - "IY+VC"
+Iterative Yang with Richardson-Lucy - "IY+RL"
+Muller Gartner - "MG"
+Muller Gartner with Van-Cittert - "MG+VC"
+Muller Gartner with Richardson-Lucy - "MG+RL"
+
+"""
+from __future__ import print_function
+from __future__ import division
+
+import os
+import warnings
+
+from nipype.interfaces.base import (
+    TraitedSpec,
+    CommandLineInputSpec,
+    CommandLine,
+    File,
+    isdefined,
+    traits,
+)
+
+warn = warnings.warn
+
+pvc_methods = ['GTM',
+               'IY',
+               'IY+RL',
+               'IY+VC',
+               'LABBE',
+               'LABBE+MTC',
+               'LABBE+MTC+RL',
+               'LABBE+MTC+VC',
+               'LABBE+RBV',
+               'LABBE+RBV+RL',
+               'LABBE+RBV+VC',
+               'MG',
+               'MG+RL',
+               'MG+VC',
+               'MTC',
+               'MTC+RL',
+               'MTC+VC',
+               'RBV',
+               'RBV+RL',
+               'RBV+VC',
+               'RL',
+               'VC']
+
+
+class PETPVCInputSpec(CommandLineInputSpec):
+    in_file = File(desc="PET image file", exists=True, mandatory=True, argstr="-i %s")
+    out_file = File(desc="Output file", genfile=True, hash_files=False, argstr="-o %s")
+    mask_file = File(desc="Mask image file", exists=True, mandatory=True, argstr="-m %s")
+    pvc = traits.Enum(pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s")
+    fwhm_x = traits.Float(desc="The full-width at half maximum in mm along x-axis", mandatory=True, argstr="-x %.4f")
+    fwhm_y = traits.Float(desc="The full-width at half maximum in mm along y-axis", mandatory=True, argstr="-y %.4f")
+    fwhm_z = traits.Float(desc="The full-width at half maximum in mm along z-axis", mandatory=True, argstr="-z %.4f")
+    debug = traits.Bool(desc="Prints debug information", usedefault=True, default_value=False, argstr="-d")
+    n_iter = traits.Int(desc="Number of iterations", default_value=10, argstr="-n %d")
+    n_deconv = traits.Int(desc="Number of deconvolution iterations", default_value=10, argstr="-k %d")
+    alpha = traits.Float(desc="Alpha value", default_value=1.5, argstr="-a %.4f")
+    stop_crit = traits.Float(desc="Stopping criterion", default_value=0.01, argstr="-s %.4f")
+
+
+class PETPVCOutputSpec(TraitedSpec):
+    out_file = File(desc="Output file")
+
+
+class PETPVC(CommandLine):
+    """ Use PETPVC for partial volume correction of PET images.
+
+    Examples
+    --------
+    >>> from ..testing import example_data
+    >>> #TODO get data for PETPVC
+    >>> pvc = PETPVC()
+    >>> pvc.inputs.in_file = 'pet.nii.gz'
+    >>> pvc.inputs.mask_file = 'tissues.nii.gz'
+    >>> pvc.inputs.out_file = 'pet_pvc_rbv.nii.gz'
+    >>> pvc.inputs.pvc = 'RBV'
+    >>> pvc.inputs.fwhm_x = 2.0
+    >>> pvc.inputs.fwhm_y = 2.0
+    >>> pvc.inputs.fwhm_z = 2.0
+    >>> outs = pvc.run() #doctest: +SKIP
+    """
+    input_spec = PETPVCInputSpec
+    output_spec = PETPVCOutputSpec
+    _cmd = 'petpvc'
+
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        outputs['out_file'] = self.inputs.out_file
+        if not isdefined(outputs['out_file']):
+            method_name = self.inputs.pvc.lower()
+            outputs['out_file'] = self._gen_fname(self.inputs.in_file,
+                                                  suffix='_{}_pvc'.format(method_name))
+
+        outputs['out_file'] = os.path.abspath(outputs['out_file'])
+        return outputs
+
+    def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True,
+                   ext='.nii.gz'):
+        """Generate a filename based on the given parameters.
+
+        The filename will take the form: cwd/basename<suffix><ext>.
+        If change_ext is True, the filename extension is replaced with the
+        given `ext`.
+
+        Parameters
+        ----------
+        basename : str
+            Filename to base the new filename on.
+        cwd : str
+            Path to prefix to the new filename. (default is os.getcwd())
+        suffix : str
+            Suffix to add to the `basename`. (default is '')
+        change_ext : bool
+            Flag to change the filename extension to the given `ext`.
+            (Default is True)
+
+        Returns
+        -------
+        fname : str
+            New filename based on given parameters.
+
+        """
+        from nipype.utils.filemanip import fname_presuffix
+
+        if basename == '':
+            msg = 'Unable to generate filename for command %s. ' % self.cmd
+            msg += 'basename is not set!'
+            raise ValueError(msg)
+        if cwd is None:
+            cwd = os.getcwd()
+        if change_ext:
+            if suffix:
+                suffix = ''.join((suffix, ext))
+            else:
+                suffix = ext
+        if suffix is None:
+            suffix = ''
+        fname = fname_presuffix(basename, suffix=suffix,
+                                use_ext=False, newpath=cwd)
+        return fname
+
+    def _gen_filename(self, name):
+        if name == 'out_file':
+            return self._list_outputs()['out_file']
+        return None
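For reference, a minimal sketch, not part of the diff, of the command line the new interface assembles from its inputs. It assumes the two placeholder images added at the end of this diff are present in the working directory; the argument order in the generated command may differ:

    from nipype.interfaces.petpvc import PETPVC

    pvc = PETPVC()
    pvc.inputs.in_file = 'pet.nii.gz'           # -i
    pvc.inputs.mask_file = 'tissues.nii.gz'     # -m
    pvc.inputs.out_file = 'pet_pvc_rbv.nii.gz'  # -o
    pvc.inputs.pvc = 'RBV'                      # -p
    pvc.inputs.fwhm_x = 2.0                     # -x (fwhm_y / fwhm_z map to -y / -z)
    pvc.inputs.fwhm_y = 2.0
    pvc.inputs.fwhm_z = 2.0
    print(pvc.cmdline)
    # e.g. petpvc -x 2.0000 -y 2.0000 -z 2.0000 -i pet.nii.gz -m tissues.nii.gz -o pet_pvc_rbv.nii.gz -p RBV
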
diff --git a/nipype/interfaces/tests/test_auto_PETPVC.py b/nipype/interfaces/tests/test_auto_PETPVC.py
new file mode 100644
index 0000000000..67c02c72b0
--- /dev/null
+++ b/nipype/interfaces/tests/test_auto_PETPVC.py
@@ -0,0 +1,65 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from ...testing import assert_equal
+from ..petpvc import PETPVC
+
+
+def test_PETPVC_inputs():
+    input_map = dict(alpha=dict(argstr='-a %.4f',
+    ),
+    args=dict(argstr='%s',
+    ),
+    debug=dict(argstr='-d',
+    usedefault=True,
+    ),
+    environ=dict(nohash=True,
+    usedefault=True,
+    ),
+    fwhm_x=dict(argstr='-x %.4f',
+    mandatory=True,
+    ),
+    fwhm_y=dict(argstr='-y %.4f',
+    mandatory=True,
+    ),
+    fwhm_z=dict(argstr='-z %.4f',
+    mandatory=True,
+    ),
+    ignore_exception=dict(nohash=True,
+    usedefault=True,
+    ),
+    in_file=dict(argstr='-i %s',
+    mandatory=True,
+    ),
+    mask_file=dict(argstr='-m %s',
+    mandatory=True,
+    ),
+    n_deconv=dict(argstr='-k %d',
+    ),
+    n_iter=dict(argstr='-n %d',
+    ),
+    out_file=dict(argstr='-o %s',
+    genfile=True,
+    hash_files=False,
+    ),
+    pvc=dict(argstr='-p %s',
+    mandatory=True,
+    ),
+    stop_crit=dict(argstr='-s %.4f',
+    ),
+    terminal_output=dict(nohash=True,
+    ),
+    )
+    inputs = PETPVC.input_spec()
+
+    for key, metadata in list(input_map.items()):
+        for metakey, value in list(metadata.items()):
+            yield assert_equal, getattr(inputs.traits()[key], metakey), value
+
+
+def test_PETPVC_outputs():
+    output_map = dict(out_file=dict(),
+    )
+    outputs = PETPVC.output_spec()
+
+    for key, metadata in list(output_map.items()):
+        for metakey, value in list(metadata.items()):
+            yield assert_equal, getattr(outputs.traits()[key], metakey), value
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index 193fb0650a..b14d73a307 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -644,7 +644,13 @@ def _write_report_info(self, workingdir, name, graph):
                                             value=1))
         save_json(graph_file, json_dict)
         graph_file = op.join(report_dir, 'graph.json')
-        template = '%%0%dd_' % np.ceil(np.log10(len(nodes))).astype(int)
+        # Avoid RuntimeWarning: divide by zero encountered in log10
+        num_nodes = len(nodes)
+        if num_nodes > 0:
+            index_name = np.ceil(np.log10(num_nodes)).astype(int)
+        else:
+            index_name = 0
+        template = '%%0%dd_' % index_name
 
         def getname(u, i):
             name_parts = u.fullname.split('.')
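The guard above avoids calling np.log10(0) when a workflow graph has no nodes. A standalone sketch of the padding-width computation; the helper name is illustrative only, not from the codebase:

    import numpy as np

    def index_template(num_nodes):
        # Width of the zero-padded node-index prefix used for the report file names.
        width = np.ceil(np.log10(num_nodes)).astype(int) if num_nodes > 0 else 0
        return '%%0%dd_' % width

    print(index_template(250))  # '%03d_'
    print(index_template(0))    # '%00d_', and no divide-by-zero RuntimeWarning
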
diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py
index 96f47fd6ef..2ad7ccf1b6 100644
--- a/nipype/pipeline/plugins/ipython.py
+++ b/nipype/pipeline/plugins/ipython.py
@@ -13,7 +13,7 @@
 IPython_not_loaded = False
 try:
     from IPython import __version__ as IPyversion
-    from IPython.parallel.error import TimeoutError
+    from ipyparallel.error import TimeoutError
 except:
     IPython_not_loaded = True
 
@@ -47,7 +47,7 @@ class IPythonPlugin(DistributedPluginBase):
 
     def __init__(self, plugin_args=None):
         if IPython_not_loaded:
-            raise ImportError('IPython parallel could not be imported')
+            raise ImportError('ipyparallel could not be imported')
         super(IPythonPlugin, self).__init__(plugin_args=plugin_args)
         self.iparallel = None
         self.taskclient = None
@@ -56,11 +56,11 @@ def __init__(self, plugin_args=None):
 
     def run(self, graph, config, updatehash=False):
         """Executes a pre-defined pipeline is distributed approaches
-        based on IPython's parallel processing interface
+        based on IPython's ipyparallel processing interface
         """
         # retrieve clients again
         try:
-            name = 'IPython.parallel'
+            name = 'ipyparallel'
             __import__(name)
             self.iparallel = sys.modules[name]
         except ImportError:
diff --git a/nipype/pipeline/plugins/ipythonx.py b/nipype/pipeline/plugins/ipythonx.py
index 7eca73e0ea..61ffd78fba 100644
--- a/nipype/pipeline/plugins/ipythonx.py
+++ b/nipype/pipeline/plugins/ipythonx.py
@@ -22,14 +22,14 @@ class IPythonXPlugin(DistributedPluginBase):
 
     def __init__(self, plugin_args=None):
         if IPython_not_loaded:
-            raise ImportError('IPython parallel could not be imported')
+            raise ImportError('ipyparallel could not be imported')
         super(IPythonXPlugin, self).__init__(plugin_args=plugin_args)
         self.ipyclient = None
         self.taskclient = None
 
     def run(self, graph, config, updatehash=False):
         """Executes a pre-defined pipeline is distributed approaches
-        based on IPython's parallel processing interface
+        based on IPython's ipyparallel processing interface
         """
         # retrieve clients again
         try:
diff --git a/nipype/testing/data/pet.nii.gz b/nipype/testing/data/pet.nii.gz
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/nipype/testing/data/tissues.nii.gz b/nipype/testing/data/tissues.nii.gz
new file mode 100644
index 0000000000..e69de29bb2
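The plugin changes above assume the standalone ipyparallel package (IPython 4.0 and later). A hypothetical compatibility shim, not part of this diff, that would also tolerate older IPython installations where the module still lives under IPython.parallel:

    try:
        from ipyparallel.error import TimeoutError        # IPython >= 4: separate package
    except ImportError:
        from IPython.parallel.error import TimeoutError   # IPython < 4: bundled module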