From 34a7c1d8d8c6f5b068c534e8e8fe7daa2afd1b17 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 1 Mar 2018 11:12:28 -0500 Subject: [PATCH 1/4] fix: some lgtm errors (#2475) --- nipype/algorithms/misc.py | 5 ++++- nipype/interfaces/ants/segmentation.py | 6 +++--- nipype/interfaces/cmtk/nx.py | 2 +- nipype/interfaces/fsl/preprocess.py | 6 +++--- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 87b1fae400..64b9ab775b 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1191,7 +1191,7 @@ def _list_outputs(self): return outputs -def normalize_tpms(in_files, in_mask=None, out_files=[]): +def normalize_tpms(in_files, in_mask=None, out_files=None): """ Returns the input tissue probability maps (tpms, aka volume fractions) normalized to sum up 1.0 at each voxel within the mask. @@ -1202,6 +1202,9 @@ def normalize_tpms(in_files, in_mask=None, out_files=[]): in_files = np.atleast_1d(in_files).tolist() + if not out_files: + out_files = [] + if len(out_files) != len(in_files): for i, finname in enumerate(in_files): fname, fext = op.splitext(op.basename(finname)) diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index b82db2e401..a7e0708997 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -150,7 +150,7 @@ def _format_arg(self, opt, spec, val): if isdefined(self.inputs.save_posteriors): retval += ",%s" % self.inputs.output_posteriors_name_template return retval + "]" - return super(ANTSCommand, self)._format_arg(opt, spec, val) + return super(Atropos, self)._format_arg(opt, spec, val) def _run_interface(self, runtime, correct_return_codes=[0]): if self.inputs.initialization == "PriorProbabilityImages": @@ -640,7 +640,7 @@ def _format_arg(self, opt, spec, val): _, _, ext = split_filename(self.inputs.segmentation_priors[0]) retval = "-p nipype_priors/BrainSegmentationPrior%02d" + ext return retval - return super(ANTSCommand, self)._format_arg(opt, spec, val) + return super(CorticalThickness, self)._format_arg(opt, spec, val) def _run_interface(self, runtime, correct_return_codes=[0]): priors_directory = os.path.join(os.getcwd(), "nipype_priors") @@ -1066,7 +1066,7 @@ def _format_arg(self, opt, spec, val): assert len(val) == self.inputs.modalities * len(self.inputs.warped_label_images), \ "Number of intensity images and label maps must be the same {0}!={1}".format( len(val), len(self.inputs.warped_label_images)) - return super(ANTSCommand, self)._format_arg(opt, spec, val) + return super(JointFusion, self)._format_arg(opt, spec, val) return retval def _list_outputs(self): diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index 1072188cbc..4ab948523b 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -139,7 +139,7 @@ def average_networks(in_files, ntwk_res_file, group_id): current = ntwk.edge[edge[0]][edge[1]] data = add_dicts_by_key(current, data) ntwk.add_edge(edge[0], edge[1], **data) - nodes = list(nodes()) + nodes = list(tmp.nodes()) for node in nodes: data = {} data = ntwk.nodes[node] diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index ae24c9c54b..b51eceeec0 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -2025,7 +2025,7 @@ def _list_outputs(self): outputs['bvars'] = self._gen_mesh_names('bvars', structures) return outputs - def _gen_fname(self, name): + def _gen_fname(self, 
basename): path, outname, ext = split_filename(self.inputs.out_file) method = 'none' @@ -2039,9 +2039,9 @@ def _gen_fname(self, name): thres = '%.4f' % self.inputs.method_as_numerical_threshold method = thres.replace('.', '') - if name == 'original_segmentations': + if basename == 'original_segmentations': return op.abspath('%s_all_%s_origsegs.nii.gz' % (outname, method)) - if name == 'segmentation_file': + if basename == 'segmentation_file': return op.abspath('%s_all_%s_firstseg.nii.gz' % (outname, method)) return None From b9ba722c82d06cc564a48375493b68d4d6b67935 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 8 Mar 2018 14:40:26 -0500 Subject: [PATCH 2/4] BF: more lgtm fixes --- nipype/algorithms/misc.py | 2 +- nipype/info.py | 1 - nipype/interfaces/ants/registration.py | 4 - nipype/interfaces/ants/segmentation.py | 12 -- nipype/interfaces/base/specs.py | 2 + nipype/interfaces/cmtk/cmtk.py | 3 +- .../interfaces/fsl/tests/test_preprocess.py | 6 +- nipype/interfaces/nipy/preprocess.py | 139 ------------------ nipype/pipeline/engine/base.py | 4 - nipype/pipeline/engine/utils.py | 4 +- nipype/pipeline/engine/workflows.py | 1 + nipype/pipeline/plugins/ipython.py | 2 + nipype/pipeline/plugins/ipythonx.py | 95 ------------ nipype/utils/filemanip.py | 12 +- 14 files changed, 16 insertions(+), 271 deletions(-) delete mode 100644 nipype/pipeline/plugins/ipythonx.py diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 64b9ab775b..01f45eac78 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1202,7 +1202,7 @@ def normalize_tpms(in_files, in_mask=None, out_files=None): in_files = np.atleast_1d(in_files).tolist() - if not out_files: + if out_files is None: out_files = [] if len(out_files) != len(in_files): diff --git a/nipype/info.py b/nipype/info.py index 7cee71d6bd..69a8b132a4 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -120,7 +120,6 @@ def get_nipype_gitversion(): URL = 'http://nipy.org/nipype' DOWNLOAD_URL = 'http://github.com/nipy/nipype/archives/master' LICENSE = 'Apache License, 2.0' -CLASSIFIERS = CLASSIFIERS AUTHOR = 'nipype developers' AUTHOR_EMAIL = 'neuroimaging@python.org' PLATFORMS = 'OS Independent' diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index bcc1d94b89..f0ac2dc85a 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1118,8 +1118,6 @@ def _get_outputfilenames(self, inverse=False): output_filename = self.inputs.output_warped_image if isinstance(output_filename, bool): output_filename = '%s_Warped.nii.gz' % self.inputs.output_transform_prefix - else: - output_filename = output_filename return output_filename inv_output_filename = None if isdefined(self.inputs.output_inverse_warped_image) and \ @@ -1127,8 +1125,6 @@ def _get_outputfilenames(self, inverse=False): inv_output_filename = self.inputs.output_inverse_warped_image if isinstance(inv_output_filename, bool): inv_output_filename = '%s_InverseWarped.nii.gz' % self.inputs.output_transform_prefix - else: - inv_output_filename = inv_output_filename return inv_output_filename def _format_convergence(self, ii): diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index a7e0708997..83e301733b 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -700,12 +700,6 @@ def _list_outputs(self): return outputs -class antsCorticalThickness(CorticalThickness): - DeprecationWarning( - 'This class has been 
replaced by CorticalThickness and will be removed in version 0.13' - ) - - class BrainExtractionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)') @@ -924,12 +918,6 @@ def _list_outputs(self): return outputs -class antsBrainExtraction(BrainExtraction): - DeprecationWarning( - 'This class has been replaced by BrainExtraction and will be removed in version 0.13' - ) - - class JointFusionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index 3f7676c191..2f0d4bb0e7 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -197,6 +197,7 @@ def _clean_container(self, objekt, undefinedval=None, skipundefined=False): if isinstance(objekt, tuple): out = tuple(out) else: + out = None if isdefined(objekt): out = objekt else: @@ -283,6 +284,7 @@ def _get_sorteddict(self, if isinstance(objekt, tuple): out = tuple(out) else: + out = None if isdefined(objekt): if (hash_files and isinstance(objekt, (str, bytes)) and os.path.isfile(objekt)): diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index c7ca43cd4e..bddd2d2269 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -222,7 +222,8 @@ def cmat(track_file, gp = nx.read_gpickle(resolution_network_file) elif ext == '.graphml': gp = nx.read_graphml(resolution_network_file) - + else: + raise TypeError("Unable to read file:", resolution_network_file) nROIs = len(gp.nodes()) # add node information from parcellation diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index bd75234c62..dc249900fc 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -615,15 +615,15 @@ def test_first_genfname(): first.inputs.out_file = 'segment.nii' first.inputs.output_type = "NIFTI_GZ" - value = first._gen_fname(name='original_segmentations') + value = first._gen_fname(basename='original_segmentations') expected_value = os.path.abspath('segment_all_fast_origsegs.nii.gz') assert value == expected_value first.inputs.method = 'none' - value = first._gen_fname(name='original_segmentations') + value = first._gen_fname(basename='original_segmentations') expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') assert value == expected_value first.inputs.method = 'auto' first.inputs.list_of_specific_structures = ['L_Hipp', 'R_Hipp'] - value = first._gen_fname(name='original_segmentations') + value = first._gen_fname(basename='original_segmentations') expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') assert value == expected_value diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index a3566e07a9..357a587c87 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -85,146 +85,7 @@ def _list_outputs(self): return outputs -class FmriRealign4dInputSpec(BaseInterfaceInputSpec): - - in_file = InputMultiPath( - File(exists=True), mandatory=True, desc="File to realign") - tr = traits.Float(desc="TR in seconds", mandatory=True) - slice_order = traits.List( - traits.Int(), - desc=('0 based slice order. This would be equivalent to entering' - 'np.argsort(spm_slice_order) for this field. This effects' - 'interleaved acquisition. 
This field will be deprecated in' - 'future Nipy releases and be replaced by actual slice' - 'acquisition times.'), - requires=["time_interp"]) - tr_slices = traits.Float(desc="TR slices", requires=['time_interp']) - start = traits.Float( - 0.0, usedefault=True, desc="time offset into TR to align slices to") - time_interp = traits.Enum( - True, - requires=["slice_order"], - desc="Assume smooth changes across time e.g.,\ - fmri series. If you don't want slice timing \ - correction set this to undefined") - loops = InputMultiPath( - [5], traits.Int, usedefault=True, desc="loops within each run") - between_loops = InputMultiPath( - [5], - traits.Int, - usedefault=True, - desc="loops used to \ - realign different \ - runs") - speedup = InputMultiPath( - [5], - traits.Int, - usedefault=True, - desc="successive image \ - sub-sampling factors \ - for acceleration") - - -class FmriRealign4dOutputSpec(TraitedSpec): - - out_file = OutputMultiPath(File(exists=True), desc="Realigned files") - par_file = OutputMultiPath( - File(exists=True), desc="Motion parameter files") - - -class FmriRealign4d(BaseInterface): - """Simultaneous motion and slice timing correction algorithm - - This interface wraps nipy's FmriRealign4d algorithm [1]_. - - Examples - -------- - >>> from nipype.interfaces.nipy.preprocess import FmriRealign4d - >>> realigner = FmriRealign4d() - >>> realigner.inputs.in_file = ['functional.nii'] - >>> realigner.inputs.tr = 2 - >>> realigner.inputs.slice_order = list(range(0,67)) - >>> res = realigner.run() # doctest: +SKIP - - References - ---------- - .. [1] Roche A. A four-dimensional registration algorithm with \ - application to joint correction of motion and slice timing \ - in fMRI. IEEE Trans Med Imaging. 2011 Aug;30(8):1546-54. DOI_. - - .. _DOI: http://dx.doi.org/10.1109/TMI.2011.2131152 - - """ - - input_spec = FmriRealign4dInputSpec - output_spec = FmriRealign4dOutputSpec - keywords = ['slice timing', 'motion correction'] - - def __init__(self, **inputs): - DeprecationWarning(('Will be deprecated in release 0.13. Please use' - 'SpaceTimeRealigner')) - BaseInterface.__init__(self, **inputs) - - def _run_interface(self, runtime): - from nipy.algorithms.registration import FmriRealign4d as FR4d - all_ims = [load_image(fname) for fname in self.inputs.in_file] - - if not isdefined(self.inputs.tr_slices): - TR_slices = None - else: - TR_slices = self.inputs.tr_slices - - R = FR4d( - all_ims, - tr=self.inputs.tr, - slice_order=self.inputs.slice_order, - tr_slices=TR_slices, - time_interp=self.inputs.time_interp, - start=self.inputs.start) - - R.estimate( - loops=list(self.inputs.loops), - between_loops=list(self.inputs.between_loops), - speedup=list(self.inputs.speedup)) - - corr_run = R.resample() - self._out_file_path = [] - self._par_file_path = [] - - for j, corr in enumerate(corr_run): - self._out_file_path.append( - os.path.abspath('corr_%s.nii.gz' % - (split_filename(self.inputs.in_file[j])[1]))) - save_image(corr, self._out_file_path[j]) - - self._par_file_path.append( - os.path.abspath('%s.par' % - (os.path.split(self.inputs.in_file[j])[1]))) - mfile = open(self._par_file_path[j], 'w') - motion = R._transforms[j] - # nipy does not encode euler angles. 
return in original form of - # translation followed by rotation vector see: - # http://en.wikipedia.org/wiki/Rodrigues'_rotation_formula - for i, mo in enumerate(motion): - params = [ - '%.10f' % item - for item in np.hstack((mo.translation, mo.rotation)) - ] - string = ' '.join(params) + '\n' - mfile.write(string) - mfile.close() - - return runtime - - def _list_outputs(self): - outputs = self._outputs().get() - outputs['out_file'] = self._out_file_path - outputs['par_file'] = self._par_file_path - return outputs - - class SpaceTimeRealignerInputSpec(BaseInterfaceInputSpec): - in_file = InputMultiPath( File(exists=True), mandatory=True, diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 0883023f63..72094ed10f 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -109,8 +109,4 @@ def save(self, filename=None): savepkl(filename, self) def load(self, filename): - if '.npz' in filename: - DeprecationWarning(('npz files will be deprecated in the next ' - 'release. you can use numpy to open them.')) - return np.load(filename) return loadpkl(filename) diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 2b6bb6ed39..964419b5bd 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -280,7 +280,7 @@ def load_resultfile(path, name): except UnicodeDecodeError: # Was this pickle created with Python 2.x? pickle.load(pkl_file, fix_imports=True, encoding='utf-8') - logger.warning('Successfully loaded pickle in compatibility mode') + logger.warning('Successfully loaded pkl in compatibility mode') except (traits.TraitError, AttributeError, ImportError, EOFError) as err: if isinstance(err, (AttributeError, ImportError)): @@ -439,6 +439,8 @@ def modify_paths(object, relative=True, basedir=None): raise IOError('File %s not found' % out) else: out = object + else: + raise TypeError("Object {} is undefined".format(object)) return out diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index c6a9047337..34a89a0a7b 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -496,6 +496,7 @@ def export(self, flatgraph = self._create_flat_graph() nodes = nx.topological_sort(flatgraph) + all_lines = None lines = ['# Workflow'] importlines = [ 'from nipype.pipeline.engine import Workflow, ' diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index 0ac21b04f8..aa20f935c1 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -39,6 +39,8 @@ def execute_task(pckld_task, node_config, updatehash): result = task.run(updatehash=updatehash) except: traceback = format_exc() + from pickle import loads + task = loads(pckld_task) result = task.result os.chdir(cwd) return result, traceback, gethostname() diff --git a/nipype/pipeline/plugins/ipythonx.py b/nipype/pipeline/plugins/ipythonx.py deleted file mode 100644 index 2ab583314d..0000000000 --- a/nipype/pipeline/plugins/ipythonx.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Parallel workflow execution via IPython controller -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -import sys -from future.utils import raise_from - -from ... 
import LooseVersion -from .base import (DistributedPluginBase, logger, report_crash) - -IPython_not_loaded = False -try: - from IPython import __version__ as IPyversion - if LooseVersion(IPyversion) < LooseVersion('0.11'): - from IPython.kernel.contexts import ConnectionRefusedError -except ImportError: - IPython_not_loaded = True - - -class IPythonXPlugin(DistributedPluginBase): - """Execute workflow with ipython - """ - - def __init__(self, plugin_args=None): - if LooseVersion(IPyversion) > LooseVersion('0.10.1'): - raise EnvironmentError(('The IPythonX plugin can only be used with' - ' older IPython versions. Please use the ' - 'IPython plugin instead.')) - DeprecationWarning('This plugin will be deprecated as of version 0.13') - if IPython_not_loaded: - raise ImportError('ipyparallel could not be imported') - super(IPythonXPlugin, self).__init__(plugin_args=plugin_args) - self.ipyclient = None - self.taskclient = None - - def run(self, graph, config, updatehash=False): - """Executes a pre-defined pipeline is distributed approaches - based on IPython's ipyparallel processing interface - """ - # retrieve clients again - try: - name = 'IPython.kernel.client' - __import__(name) - self.ipyclient = sys.modules[name] - except ImportError as e: - raise_from( - ImportError("Ipython kernel not found. Parallel " - "execution will be unavailable"), e) - try: - self.taskclient = self.ipyclient.TaskClient() - except Exception as e: - if isinstance(e, ConnectionRefusedError): - raise_from(Exception("No IPython clients found."), e) - if isinstance(e, ValueError): - raise_from(Exception("Ipython kernel not installed"), e) - return super(IPythonXPlugin, self).run( - graph, config, updatehash=updatehash) - - def _get_result(self, taskid): - return self.taskclient.get_task_result(taskid, block=False) - - def _submit_job(self, node, updatehash=False): - cmdstr = """import sys -from traceback import format_exception -traceback=None -result=None -try: - result = task.run(updatehash=updatehash) -except: - etype, eval, etr = sys.exc_info() - traceback = format_exception(etype,eval,etr) - result = task.result -""" - task = self.ipyclient.StringTask( - cmdstr, - push=dict(task=node, updatehash=updatehash), - pull=['result', 'traceback']) - return self.taskclient.run(task, block=False) - - def _report_crash(self, node, result=None): - if result and result['traceback']: - node._result = result['result'] - node._traceback = result['traceback'] - return report_crash(node, traceback=result['traceback']) - else: - return report_crash(node) - - def _clear_task(self, taskid): - if IPyversion >= '0.10.1': - logger.debug("Clearing id: %d" % taskid) - self.taskclient.clear(taskid) diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index 4b4942227e..a0711efb2d 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -424,6 +424,8 @@ def copyfile(originalfile, hashfn = hash_timestamp elif hashmethod == 'content': hashfn = hash_infile + else: + raise AttributeError("Unknown hash method found:", hashmethod) newhash = hashfn(newfile) fmlogger.debug('File: %s already exists,%s, copy:%d', newfile, newhash, copy) @@ -619,16 +621,6 @@ def load_json(filename): def loadcrash(infile, *args): if '.pkl' in infile: return loadpkl(infile) - elif '.npz' in infile: - DeprecationWarning(('npz files will be deprecated in the next ' - 'release. 
you can use numpy to open them.')) - data = np.load(infile) - out = {} - for k in data.files: - out[k] = [f for f in data[k].flat] - if len(out[k]) == 1: - out[k] = out[k].pop() - return out else: raise ValueError('Only pickled crashfiles are supported') From 8585d5132861f7ab8f05f62fc5a0ebe0fdc95940 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 8 Mar 2018 14:49:36 -0500 Subject: [PATCH 3/4] fix: remove imports after interface cleaning --- doc/users/nipypecmd.rst | 3 +- doc/users/plugins.rst | 4 -- nipype/interfaces/nipy/__init__.py | 2 +- .../nipy/tests/test_auto_FmriRealign4d.py | 37 -------------- nipype/pipeline/plugins/__init__.py | 1 - nipype/utils/tests/test_cmd.py | 48 ------------------- 6 files changed, 2 insertions(+), 93 deletions(-) delete mode 100644 nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py diff --git a/doc/users/nipypecmd.rst b/doc/users/nipypecmd.rst index a986804c9c..3717306920 100644 --- a/doc/users/nipypecmd.rst +++ b/doc/users/nipypecmd.rst @@ -10,7 +10,7 @@ This is especially useful when running Interfaces wrapping code that does not ha command line equivalents (nipy or SPM). Being able to run Nipype interfaces opens new possibilities such as inclusion of SPM processing steps in bash scripts. -To run Nipype Interafces you need to use the nipype_cmd tool that should already be installed. +To run Nipype Interfaces you need to use the nipype_cmd tool that should already be installed. The tool allows you to list Interfaces available in a certain package: .. testcode:: @@ -24,7 +24,6 @@ The tool allows you to list Interfaces available in a certain package: ComputeMask FitGLM EstimateContrast - FmriRealign4d After selecting a particular Interface you can learn what inputs it requires: diff --git a/doc/users/plugins.rst b/doc/users/plugins.rst index e655e5f6db..1484247b7e 100644 --- a/doc/users/plugins.rst +++ b/doc/users/plugins.rst @@ -103,10 +103,6 @@ machinery. .. note:: - We provide backward compatibility with IPython_ versions earlier than - 0.10.1 using the IPythonX plugin. This plugin will be deprecated as of - version 0.13 of Nipype. - Please read the IPython_ documentation to determine how to setup your cluster for distributed processing. This typically involves calling ipcluster. 
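Reviewer note: the interfaces and plugin being scrubbed from the docs here were "deprecated" in earlier releases only by constructing a bare DeprecationWarning (see the class bodies and constructors deleted in PATCH 2/4: antsCorticalThickness, antsBrainExtraction, FmriRealign4d, IPythonXPlugin), which builds an exception object and discards it without ever notifying the user. A minimal sketch of the idiom that would actually surface such a message, using a hypothetical alias class rather than anything still in the tree:

    import warnings

    from nipype.interfaces.ants.segmentation import CorticalThickness


    class antsCorticalThickness(CorticalThickness):
        """Hypothetical backwards-compatibility alias, for illustration only."""

        def __init__(self, **inputs):
            # warnings.warn() emits the message; a bare DeprecationWarning(...)
            # expression creates an exception instance and throws it away.
            warnings.warn(
                'antsCorticalThickness is deprecated; use CorticalThickness',
                DeprecationWarning, stacklevel=2)
            super(antsCorticalThickness, self).__init__(**inputs)

Here stacklevel=2 points the warning at the caller's line rather than at the shim itself.
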
diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py index 647850fe1b..19d030b61a 100644 --- a/nipype/interfaces/nipy/__init__.py +++ b/nipype/interfaces/nipy/__init__.py @@ -1,4 +1,4 @@ # -*- coding: utf-8 -*- from .model import FitGLM, EstimateContrast -from .preprocess import ComputeMask, FmriRealign4d, SpaceTimeRealigner +from .preprocess import ComputeMask, SpaceTimeRealigner from .utils import Similarity diff --git a/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py b/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py deleted file mode 100644 index eea799c486..0000000000 --- a/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py +++ /dev/null @@ -1,37 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..preprocess import FmriRealign4d - - -def test_FmriRealign4d_inputs(): - input_map = dict( - between_loops=dict(usedefault=True, ), - ignore_exception=dict( - deprecated='1.0.0', - nohash=True, - usedefault=True, - ), - in_file=dict(mandatory=True, ), - loops=dict(usedefault=True, ), - slice_order=dict(requires=['time_interp'], ), - speedup=dict(usedefault=True, ), - start=dict(usedefault=True, ), - time_interp=dict(requires=['slice_order'], ), - tr=dict(mandatory=True, ), - tr_slices=dict(requires=['time_interp'], ), - ) - inputs = FmriRealign4d.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_FmriRealign4d_outputs(): - output_map = dict( - out_file=dict(), - par_file=dict(), - ) - outputs = FmriRealign4d.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/pipeline/plugins/__init__.py b/nipype/pipeline/plugins/__init__.py index fd93146adb..6d2467afdf 100644 --- a/nipype/pipeline/plugins/__init__.py +++ b/nipype/pipeline/plugins/__init__.py @@ -6,7 +6,6 @@ from .debug import DebugPlugin from .linear import LinearPlugin -from .ipythonx import IPythonXPlugin from .pbs import PBSPlugin from .oar import OARPlugin from .sge import SGEPlugin diff --git a/nipype/utils/tests/test_cmd.py b/nipype/utils/tests/test_cmd.py index 5cc704c51d..0e16e0aad8 100644 --- a/nipype/utils/tests/test_cmd.py +++ b/nipype/utils/tests/test_cmd.py @@ -88,54 +88,6 @@ def test_list_nipy_interfacesp(self): \tComputeMask \tEstimateContrast \tFitGLM -\tFmriRealign4d \tSimilarity \tSpaceTimeRealigner """ - - def test_run_4d_realign_without_arguments(self): - with pytest.raises(SystemExit) as cm: - with capture_sys_output() as (stdout, stderr): - nipype_cmd.main( - ['nipype_cmd', 'nipype.interfaces.nipy', 'FmriRealign4d']) - - exit_exception = cm.value - assert exit_exception.code == 2 - - error_message = """usage: nipype_cmd nipype.interfaces.nipy FmriRealign4d [-h] - [--between_loops [BETWEEN_LOOPS [BETWEEN_LOOPS ...]]] - [--ignore_exception] - [--loops [LOOPS [LOOPS ...]]] - [--slice_order SLICE_ORDER] - [--speedup [SPEEDUP [SPEEDUP ...]]] - [--start START] - [--time_interp TIME_INTERP] - [--tr_slices TR_SLICES] - in_file [in_file ...] 
- tr""" - - if not PY2: - error_message += """ -nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: the following arguments are required: in_file, tr -""" - else: - error_message += """ -nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: too few arguments -""" - - assert stderr.getvalue() == error_message - assert stdout.getvalue() == '' - - def test_run_4d_realign_help(self): - with pytest.raises(SystemExit) as cm: - with capture_sys_output() as (stdout, stderr): - nipype_cmd.main([ - 'nipype_cmd', 'nipype.interfaces.nipy', 'FmriRealign4d', - '-h' - ]) - - exit_exception = cm.value - assert exit_exception.code == 0 - - assert stderr.getvalue() == '' - assert "Run FmriRealign4d" in stdout.getvalue() From 5007c996e52af3f7f8f7b3cf806988dfe2249293 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 8 Mar 2018 15:52:43 -0500 Subject: [PATCH 4/4] fix: remove lingering autotests --- .../tests/test_auto_antsBrainExtraction.py | 87 ----------------- .../tests/test_auto_antsCorticalThickness.py | 97 ------------------- 2 files changed, 184 deletions(-) delete mode 100644 nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py delete mode 100644 nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py diff --git a/nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py b/nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py deleted file mode 100644 index 19f42b7c2d..0000000000 --- a/nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py +++ /dev/null @@ -1,87 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..segmentation import antsBrainExtraction - - -def test_antsBrainExtraction_inputs(): - input_map = dict( - anatomical_image=dict( - argstr='-a %s', - mandatory=True, - ), - args=dict(argstr='%s', ), - brain_probability_mask=dict( - argstr='-m %s', - copyfile=False, - mandatory=True, - ), - brain_template=dict( - argstr='-e %s', - mandatory=True, - ), - debug=dict(argstr='-z 1', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extraction_registration_mask=dict(argstr='-f %s', ), - ignore_exception=dict( - deprecated='1.0.0', - nohash=True, - usedefault=True, - ), - image_suffix=dict( - argstr='-s %s', - usedefault=True, - ), - keep_temporary_files=dict(argstr='-k %d', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - terminal_output=dict( - deprecated='1.0.0', - nohash=True, - ), - use_floatingpoint_precision=dict(argstr='-q %d', ), - use_random_seeding=dict(argstr='-u %d', ), - ) - inputs = antsBrainExtraction.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_antsBrainExtraction_outputs(): - output_map = dict( - BrainExtractionBrain=dict(), - BrainExtractionCSF=dict(), - BrainExtractionGM=dict(), - BrainExtractionInitialAffine=dict(), - BrainExtractionInitialAffineFixed=dict(), - BrainExtractionInitialAffineMoving=dict(), - BrainExtractionLaplacian=dict(), - BrainExtractionMask=dict(), - BrainExtractionPrior0GenericAffine=dict(), - BrainExtractionPrior1InverseWarp=dict(), - BrainExtractionPrior1Warp=dict(), - BrainExtractionPriorWarped=dict(), - BrainExtractionSegmentation=dict(), - BrainExtractionTemplateLaplacian=dict(), - BrainExtractionTmp=dict(), - BrainExtractionWM=dict(), - N4Corrected0=dict(), - N4Truncated0=dict(), - 
) - outputs = antsBrainExtraction.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py b/nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py deleted file mode 100644 index 4fccadacce..0000000000 --- a/nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py +++ /dev/null @@ -1,97 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..segmentation import antsCorticalThickness - - -def test_antsCorticalThickness_inputs(): - input_map = dict( - anatomical_image=dict( - argstr='-a %s', - mandatory=True, - ), - args=dict(argstr='%s', ), - b_spline_smoothing=dict(argstr='-v', ), - brain_probability_mask=dict( - argstr='-m %s', - copyfile=False, - mandatory=True, - ), - brain_template=dict( - argstr='-e %s', - mandatory=True, - ), - cortical_label_image=dict(), - debug=dict(argstr='-z 1', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extraction_registration_mask=dict(argstr='-f %s', ), - ignore_exception=dict( - deprecated='1.0.0', - nohash=True, - usedefault=True, - ), - image_suffix=dict( - argstr='-s %s', - usedefault=True, - ), - keep_temporary_files=dict(argstr='-k %d', ), - label_propagation=dict(argstr='-l %s', ), - max_iterations=dict(argstr='-i %d', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - posterior_formulation=dict(argstr='-b %s', ), - prior_segmentation_weight=dict(argstr='-w %f', ), - quick_registration=dict(argstr='-q 1', ), - segmentation_iterations=dict(argstr='-n %d', ), - segmentation_priors=dict( - argstr='-p %s', - mandatory=True, - ), - t1_registration_template=dict( - argstr='-t %s', - mandatory=True, - ), - terminal_output=dict( - deprecated='1.0.0', - nohash=True, - ), - use_floatingpoint_precision=dict(argstr='-j %d', ), - use_random_seeding=dict(argstr='-u %d', ), - ) - inputs = antsCorticalThickness.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_antsCorticalThickness_outputs(): - output_map = dict( - BrainExtractionMask=dict(), - BrainSegmentation=dict(), - BrainSegmentationN4=dict(), - BrainSegmentationPosteriors=dict(), - BrainVolumes=dict(), - CorticalThickness=dict(), - CorticalThicknessNormedToTemplate=dict(), - SubjectToTemplate0GenericAffine=dict(), - SubjectToTemplate1Warp=dict(), - SubjectToTemplateLogJacobian=dict(), - TemplateToSubject0Warp=dict(), - TemplateToSubject1GenericAffine=dict(), - ) - outputs = antsCorticalThickness.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value
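
Stepping back, most of the LGTM findings addressed in PATCH 1/4 reduce to two recurring Python patterns: a mutable default argument (normalize_tpms) and super() being passed the parent class instead of the class being defined (Atropos, CorticalThickness, JointFusion). A self-contained sketch of both fixes, using toy stand-ins rather than the real nipype implementations:

    def normalize_tpms(in_files, out_files=None):
        # A literal [] default is created once at function definition and
        # shared across calls; the None sentinel gives each call a fresh list.
        if out_files is None:
            out_files = []
        out_files.extend(in_files)
        return out_files


    class CommandLine(object):
        def _format_arg(self, opt, val):
            return '%s %s' % (opt, val)


    class ANTSCommand(CommandLine):
        def _format_arg(self, opt, val):
            return '--' + super(ANTSCommand, self)._format_arg(opt, val)


    class Atropos(ANTSCommand):
        def _format_arg(self, opt, val):
            # super(ANTSCommand, self) here would start the MRO lookup *after*
            # ANTSCommand and silently skip its '--' handling; naming the class
            # being defined dispatches to ANTSCommand._format_arg as intended.
            return super(Atropos, self)._format_arg(opt, val)

With the sentinel pattern, repeated calls such as normalize_tpms(['a.nii']) no longer accumulate results left over from earlier calls.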