diff --git a/doc/users/nipypecmd.rst b/doc/users/nipypecmd.rst index a986804c9c..3717306920 100644 --- a/doc/users/nipypecmd.rst +++ b/doc/users/nipypecmd.rst @@ -10,7 +10,7 @@ This is especially useful when running Interfaces wrapping code that does not ha command line equivalents (nipy or SPM). Being able to run Nipype interfaces opens new possibilities such as inclusion of SPM processing steps in bash scripts. -To run Nipype Interafces you need to use the nipype_cmd tool that should already be installed. +To run Nipype Interfaces you need to use the nipype_cmd tool that should already be installed. The tool allows you to list Interfaces available in a certain package: .. testcode:: @@ -24,7 +24,6 @@ The tool allows you to list Interfaces available in a certain package: ComputeMask FitGLM EstimateContrast - FmriRealign4d After selecting a particular Interface you can learn what inputs it requires: diff --git a/doc/users/plugins.rst b/doc/users/plugins.rst index e655e5f6db..1484247b7e 100644 --- a/doc/users/plugins.rst +++ b/doc/users/plugins.rst @@ -103,10 +103,6 @@ machinery. .. note:: - We provide backward compatibility with IPython_ versions earlier than - 0.10.1 using the IPythonX plugin. This plugin will be deprecated as of - version 0.13 of Nipype. - Please read the IPython_ documentation to determine how to setup your cluster for distributed processing. This typically involves calling ipcluster. diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 7b9d055181..de7be5d264 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1184,7 +1184,7 @@ def _list_outputs(self): return outputs -def normalize_tpms(in_files, in_mask=None, out_files=[]): +def normalize_tpms(in_files, in_mask=None, out_files=None): """ Returns the input tissue probability maps (tpms, aka volume fractions) normalized to sum up 1.0 at each voxel within the mask. 
@@ -1195,6 +1195,9 @@ def normalize_tpms(in_files, in_mask=None, out_files=[]): in_files = np.atleast_1d(in_files).tolist() + if out_files is None: + out_files = [] + if len(out_files) != len(in_files): for i, finname in enumerate(in_files): fname, fext = op.splitext(op.basename(finname)) diff --git a/nipype/info.py b/nipype/info.py index 82f1e10150..9438567ff3 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -120,7 +120,6 @@ def get_nipype_gitversion(): URL = 'http://nipy.org/nipype' DOWNLOAD_URL = 'http://github.com/nipy/nipype/archives/master' LICENSE = 'Apache License, 2.0' -CLASSIFIERS = CLASSIFIERS AUTHOR = 'nipype developers' AUTHOR_EMAIL = 'neuroimaging@python.org' PLATFORMS = 'OS Independent' diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 4fa9d0e12f..c2a5348ff4 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1112,8 +1112,6 @@ def _get_outputfilenames(self, inverse=False): output_filename = self.inputs.output_warped_image if isinstance(output_filename, bool): output_filename = '%s_Warped.nii.gz' % self.inputs.output_transform_prefix - else: - output_filename = output_filename return output_filename inv_output_filename = None if isdefined(self.inputs.output_inverse_warped_image) and \ @@ -1121,8 +1119,6 @@ def _get_outputfilenames(self, inverse=False): inv_output_filename = self.inputs.output_inverse_warped_image if isinstance(inv_output_filename, bool): inv_output_filename = '%s_InverseWarped.nii.gz' % self.inputs.output_transform_prefix - else: - inv_output_filename = inv_output_filename return inv_output_filename def _format_convergence(self, ii): diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index b3821c1425..7d798f9c2d 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -143,7 +143,7 @@ def _format_arg(self, opt, spec, val): if isdefined(self.inputs.save_posteriors): retval += ",%s" % self.inputs.output_posteriors_name_template return retval + "]" - return super(ANTSCommand, self)._format_arg(opt, spec, val) + return super(Atropos, self)._format_arg(opt, spec, val) def _run_interface(self, runtime, correct_return_codes=[0]): if self.inputs.initialization == "PriorProbabilityImages": @@ -633,7 +633,7 @@ def _format_arg(self, opt, spec, val): _, _, ext = split_filename(self.inputs.segmentation_priors[0]) retval = "-p nipype_priors/BrainSegmentationPrior%02d" + ext return retval - return super(ANTSCommand, self)._format_arg(opt, spec, val) + return super(CorticalThickness, self)._format_arg(opt, spec, val) def _run_interface(self, runtime, correct_return_codes=[0]): priors_directory = os.path.join(os.getcwd(), "nipype_priors") @@ -693,12 +693,6 @@ def _list_outputs(self): return outputs -class antsCorticalThickness(CorticalThickness): - DeprecationWarning( - 'This class has been replaced by CorticalThickness and will be removed in version 0.13' - ) - - class BrainExtractionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)') @@ -917,12 +911,6 @@ def _list_outputs(self): return outputs -class antsBrainExtraction(BrainExtraction): - DeprecationWarning( - 'This class has been replaced by BrainExtraction and will be removed in version 0.13' - ) - - class JointFusionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, @@ -1059,7 +1047,7 @@ def _format_arg(self, opt, spec, val): assert len(val) == 
self.inputs.modalities * len(self.inputs.warped_label_images), \ "Number of intensity images and label maps must be the same {0}!={1}".format( len(val), len(self.inputs.warped_label_images)) - return super(ANTSCommand, self)._format_arg(opt, spec, val) + return super(JointFusion, self)._format_arg(opt, spec, val) return retval def _list_outputs(self): diff --git a/nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py b/nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py deleted file mode 100644 index 19f42b7c2d..0000000000 --- a/nipype/interfaces/ants/tests/test_auto_antsBrainExtraction.py +++ /dev/null @@ -1,87 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..segmentation import antsBrainExtraction - - -def test_antsBrainExtraction_inputs(): - input_map = dict( - anatomical_image=dict( - argstr='-a %s', - mandatory=True, - ), - args=dict(argstr='%s', ), - brain_probability_mask=dict( - argstr='-m %s', - copyfile=False, - mandatory=True, - ), - brain_template=dict( - argstr='-e %s', - mandatory=True, - ), - debug=dict(argstr='-z 1', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extraction_registration_mask=dict(argstr='-f %s', ), - ignore_exception=dict( - deprecated='1.0.0', - nohash=True, - usedefault=True, - ), - image_suffix=dict( - argstr='-s %s', - usedefault=True, - ), - keep_temporary_files=dict(argstr='-k %d', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - terminal_output=dict( - deprecated='1.0.0', - nohash=True, - ), - use_floatingpoint_precision=dict(argstr='-q %d', ), - use_random_seeding=dict(argstr='-u %d', ), - ) - inputs = antsBrainExtraction.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_antsBrainExtraction_outputs(): - output_map = dict( - BrainExtractionBrain=dict(), - BrainExtractionCSF=dict(), - BrainExtractionGM=dict(), - BrainExtractionInitialAffine=dict(), - BrainExtractionInitialAffineFixed=dict(), - BrainExtractionInitialAffineMoving=dict(), - BrainExtractionLaplacian=dict(), - BrainExtractionMask=dict(), - BrainExtractionPrior0GenericAffine=dict(), - BrainExtractionPrior1InverseWarp=dict(), - BrainExtractionPrior1Warp=dict(), - BrainExtractionPriorWarped=dict(), - BrainExtractionSegmentation=dict(), - BrainExtractionTemplateLaplacian=dict(), - BrainExtractionTmp=dict(), - BrainExtractionWM=dict(), - N4Corrected0=dict(), - N4Truncated0=dict(), - ) - outputs = antsBrainExtraction.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py b/nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py deleted file mode 100644 index 4fccadacce..0000000000 --- a/nipype/interfaces/ants/tests/test_auto_antsCorticalThickness.py +++ /dev/null @@ -1,97 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..segmentation import antsCorticalThickness - - -def test_antsCorticalThickness_inputs(): - input_map = dict( - anatomical_image=dict( - argstr='-a %s', - mandatory=True, - ), - args=dict(argstr='%s', ), - b_spline_smoothing=dict(argstr='-v', ), - 
brain_probability_mask=dict( - argstr='-m %s', - copyfile=False, - mandatory=True, - ), - brain_template=dict( - argstr='-e %s', - mandatory=True, - ), - cortical_label_image=dict(), - debug=dict(argstr='-z 1', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extraction_registration_mask=dict(argstr='-f %s', ), - ignore_exception=dict( - deprecated='1.0.0', - nohash=True, - usedefault=True, - ), - image_suffix=dict( - argstr='-s %s', - usedefault=True, - ), - keep_temporary_files=dict(argstr='-k %d', ), - label_propagation=dict(argstr='-l %s', ), - max_iterations=dict(argstr='-i %d', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - posterior_formulation=dict(argstr='-b %s', ), - prior_segmentation_weight=dict(argstr='-w %f', ), - quick_registration=dict(argstr='-q 1', ), - segmentation_iterations=dict(argstr='-n %d', ), - segmentation_priors=dict( - argstr='-p %s', - mandatory=True, - ), - t1_registration_template=dict( - argstr='-t %s', - mandatory=True, - ), - terminal_output=dict( - deprecated='1.0.0', - nohash=True, - ), - use_floatingpoint_precision=dict(argstr='-j %d', ), - use_random_seeding=dict(argstr='-u %d', ), - ) - inputs = antsCorticalThickness.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_antsCorticalThickness_outputs(): - output_map = dict( - BrainExtractionMask=dict(), - BrainSegmentation=dict(), - BrainSegmentationN4=dict(), - BrainSegmentationPosteriors=dict(), - BrainVolumes=dict(), - CorticalThickness=dict(), - CorticalThicknessNormedToTemplate=dict(), - SubjectToTemplate0GenericAffine=dict(), - SubjectToTemplate1Warp=dict(), - SubjectToTemplateLogJacobian=dict(), - TemplateToSubject0Warp=dict(), - TemplateToSubject1GenericAffine=dict(), - ) - outputs = antsCorticalThickness.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index 3f7676c191..2f0d4bb0e7 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -197,6 +197,7 @@ def _clean_container(self, objekt, undefinedval=None, skipundefined=False): if isinstance(objekt, tuple): out = tuple(out) else: + out = None if isdefined(objekt): out = objekt else: @@ -283,6 +284,7 @@ def _get_sorteddict(self, if isinstance(objekt, tuple): out = tuple(out) else: + out = None if isdefined(objekt): if (hash_files and isinstance(objekt, (str, bytes)) and os.path.isfile(objekt)): diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index a8352a1b40..0510c728ef 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -214,7 +214,8 @@ def cmat(track_file, gp = nx.read_gpickle(resolution_network_file) elif ext == '.graphml': gp = nx.read_graphml(resolution_network_file) - + else: + raise TypeError("Unable to read file:", resolution_network_file) nROIs = len(gp.nodes()) # add node information from parcellation diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index 7316d555b1..bbc5de77b9 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -131,7 +131,7 @@ def average_networks(in_files, ntwk_res_file, group_id): current = 
ntwk.edge[edge[0]][edge[1]] data = add_dicts_by_key(current, data) ntwk.add_edge(edge[0], edge[1], **data) - nodes = list(nodes()) + nodes = list(tmp.nodes()) for node in nodes: data = {} data = ntwk.nodes[node] diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index 77f6c31314..be5891c6cf 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -2019,7 +2019,7 @@ def _list_outputs(self): outputs['bvars'] = self._gen_mesh_names('bvars', structures) return outputs - def _gen_fname(self, name): + def _gen_fname(self, basename): path, outname, ext = split_filename(self.inputs.out_file) method = 'none' @@ -2033,9 +2033,9 @@ def _gen_fname(self, name): thres = '%.4f' % self.inputs.method_as_numerical_threshold method = thres.replace('.', '') - if name == 'original_segmentations': + if basename == 'original_segmentations': return op.abspath('%s_all_%s_origsegs.nii.gz' % (outname, method)) - if name == 'segmentation_file': + if basename == 'segmentation_file': return op.abspath('%s_all_%s_firstseg.nii.gz' % (outname, method)) return None diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index bd75234c62..dc249900fc 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -615,15 +615,15 @@ def test_first_genfname(): first.inputs.out_file = 'segment.nii' first.inputs.output_type = "NIFTI_GZ" - value = first._gen_fname(name='original_segmentations') + value = first._gen_fname(basename='original_segmentations') expected_value = os.path.abspath('segment_all_fast_origsegs.nii.gz') assert value == expected_value first.inputs.method = 'none' - value = first._gen_fname(name='original_segmentations') + value = first._gen_fname(basename='original_segmentations') expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') assert value == expected_value first.inputs.method = 'auto' first.inputs.list_of_specific_structures = ['L_Hipp', 'R_Hipp'] - value = first._gen_fname(name='original_segmentations') + value = first._gen_fname(basename='original_segmentations') expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') assert value == expected_value diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py index 647850fe1b..19d030b61a 100644 --- a/nipype/interfaces/nipy/__init__.py +++ b/nipype/interfaces/nipy/__init__.py @@ -1,4 +1,4 @@ # -*- coding: utf-8 -*- from .model import FitGLM, EstimateContrast -from .preprocess import ComputeMask, FmriRealign4d, SpaceTimeRealigner +from .preprocess import ComputeMask, SpaceTimeRealigner from .utils import Similarity diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index 973344af94..6a73fbca30 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -77,146 +77,7 @@ def _list_outputs(self): return outputs -class FmriRealign4dInputSpec(BaseInterfaceInputSpec): - - in_file = InputMultiPath( - File(exists=True), mandatory=True, desc="File to realign") - tr = traits.Float(desc="TR in seconds", mandatory=True) - slice_order = traits.List( - traits.Int(), - desc=('0 based slice order. This would be equivalent to entering' - 'np.argsort(spm_slice_order) for this field. This effects' - 'interleaved acquisition. 
This field will be deprecated in' - 'future Nipy releases and be replaced by actual slice' - 'acquisition times.'), - requires=["time_interp"]) - tr_slices = traits.Float(desc="TR slices", requires=['time_interp']) - start = traits.Float( - 0.0, usedefault=True, desc="time offset into TR to align slices to") - time_interp = traits.Enum( - True, - requires=["slice_order"], - desc="Assume smooth changes across time e.g.,\ - fmri series. If you don't want slice timing \ - correction set this to undefined") - loops = InputMultiPath( - [5], traits.Int, usedefault=True, desc="loops within each run") - between_loops = InputMultiPath( - [5], - traits.Int, - usedefault=True, - desc="loops used to \ - realign different \ - runs") - speedup = InputMultiPath( - [5], - traits.Int, - usedefault=True, - desc="successive image \ - sub-sampling factors \ - for acceleration") - - -class FmriRealign4dOutputSpec(TraitedSpec): - - out_file = OutputMultiPath(File(exists=True), desc="Realigned files") - par_file = OutputMultiPath( - File(exists=True), desc="Motion parameter files") - - -class FmriRealign4d(BaseInterface): - """Simultaneous motion and slice timing correction algorithm - - This interface wraps nipy's FmriRealign4d algorithm [1]_. - - Examples - -------- - >>> from nipype.interfaces.nipy.preprocess import FmriRealign4d - >>> realigner = FmriRealign4d() - >>> realigner.inputs.in_file = ['functional.nii'] - >>> realigner.inputs.tr = 2 - >>> realigner.inputs.slice_order = list(range(0,67)) - >>> res = realigner.run() # doctest: +SKIP - - References - ---------- - .. [1] Roche A. A four-dimensional registration algorithm with \ - application to joint correction of motion and slice timing \ - in fMRI. IEEE Trans Med Imaging. 2011 Aug;30(8):1546-54. DOI_. - - .. _DOI: http://dx.doi.org/10.1109/TMI.2011.2131152 - - """ - - input_spec = FmriRealign4dInputSpec - output_spec = FmriRealign4dOutputSpec - keywords = ['slice timing', 'motion correction'] - - def __init__(self, **inputs): - DeprecationWarning(('Will be deprecated in release 0.13. Please use' - 'SpaceTimeRealigner')) - BaseInterface.__init__(self, **inputs) - - def _run_interface(self, runtime): - from nipy.algorithms.registration import FmriRealign4d as FR4d - all_ims = [load_image(fname) for fname in self.inputs.in_file] - - if not isdefined(self.inputs.tr_slices): - TR_slices = None - else: - TR_slices = self.inputs.tr_slices - - R = FR4d( - all_ims, - tr=self.inputs.tr, - slice_order=self.inputs.slice_order, - tr_slices=TR_slices, - time_interp=self.inputs.time_interp, - start=self.inputs.start) - - R.estimate( - loops=list(self.inputs.loops), - between_loops=list(self.inputs.between_loops), - speedup=list(self.inputs.speedup)) - - corr_run = R.resample() - self._out_file_path = [] - self._par_file_path = [] - - for j, corr in enumerate(corr_run): - self._out_file_path.append( - os.path.abspath('corr_%s.nii.gz' % - (split_filename(self.inputs.in_file[j])[1]))) - save_image(corr, self._out_file_path[j]) - - self._par_file_path.append( - os.path.abspath('%s.par' % - (os.path.split(self.inputs.in_file[j])[1]))) - mfile = open(self._par_file_path[j], 'w') - motion = R._transforms[j] - # nipy does not encode euler angles. 
return in original form of - # translation followed by rotation vector see: - # http://en.wikipedia.org/wiki/Rodrigues'_rotation_formula - for i, mo in enumerate(motion): - params = [ - '%.10f' % item - for item in np.hstack((mo.translation, mo.rotation)) - ] - string = ' '.join(params) + '\n' - mfile.write(string) - mfile.close() - - return runtime - - def _list_outputs(self): - outputs = self._outputs().get() - outputs['out_file'] = self._out_file_path - outputs['par_file'] = self._par_file_path - return outputs - - class SpaceTimeRealignerInputSpec(BaseInterfaceInputSpec): - in_file = InputMultiPath( File(exists=True), mandatory=True, diff --git a/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py b/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py deleted file mode 100644 index eea799c486..0000000000 --- a/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py +++ /dev/null @@ -1,37 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..preprocess import FmriRealign4d - - -def test_FmriRealign4d_inputs(): - input_map = dict( - between_loops=dict(usedefault=True, ), - ignore_exception=dict( - deprecated='1.0.0', - nohash=True, - usedefault=True, - ), - in_file=dict(mandatory=True, ), - loops=dict(usedefault=True, ), - slice_order=dict(requires=['time_interp'], ), - speedup=dict(usedefault=True, ), - start=dict(usedefault=True, ), - time_interp=dict(requires=['slice_order'], ), - tr=dict(mandatory=True, ), - tr_slices=dict(requires=['time_interp'], ), - ) - inputs = FmriRealign4d.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_FmriRealign4d_outputs(): - output_map = dict( - out_file=dict(), - par_file=dict(), - ) - outputs = FmriRealign4d.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 39bfbd7a6f..9d0bc3c699 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -101,8 +101,4 @@ def save(self, filename=None): savepkl(filename, self) def load(self, filename): - if '.npz' in filename: - DeprecationWarning(('npz files will be deprecated in the next ' - 'release. you can use numpy to open them.')) - return np.load(filename) return loadpkl(filename) diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 301a35844e..e8de06f7ec 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -280,7 +280,7 @@ def load_resultfile(path, name): except UnicodeDecodeError: # Was this pickle created with Python 2.x? 
pickle.load(pkl_file, fix_imports=True, encoding='utf-8') - logger.warning('Successfully loaded pickle in compatibility mode') + logger.warning('Successfully loaded pkl in compatibility mode') except (traits.TraitError, AttributeError, ImportError, EOFError) as err: if isinstance(err, (AttributeError, ImportError)): @@ -439,6 +439,8 @@ def modify_paths(object, relative=True, basedir=None): raise IOError('File %s not found' % out) else: out = object + else: + raise TypeError("Object {} is undefined".format(object)) return out diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index 8abef8bf14..a10dabef30 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -488,6 +488,7 @@ def export(self, flatgraph = self._create_flat_graph() nodes = nx.topological_sort(flatgraph) + all_lines = None lines = ['# Workflow'] importlines = [ 'from nipype.pipeline.engine import Workflow, ' diff --git a/nipype/pipeline/plugins/__init__.py b/nipype/pipeline/plugins/__init__.py index fd93146adb..6d2467afdf 100644 --- a/nipype/pipeline/plugins/__init__.py +++ b/nipype/pipeline/plugins/__init__.py @@ -6,7 +6,6 @@ from .debug import DebugPlugin from .linear import LinearPlugin -from .ipythonx import IPythonXPlugin from .pbs import PBSPlugin from .oar import OARPlugin from .sge import SGEPlugin diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index 0ac21b04f8..aa20f935c1 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -39,6 +39,8 @@ def execute_task(pckld_task, node_config, updatehash): result = task.run(updatehash=updatehash) except: traceback = format_exc() + from pickle import loads + task = loads(pckld_task) result = task.result os.chdir(cwd) return result, traceback, gethostname() diff --git a/nipype/pipeline/plugins/ipythonx.py b/nipype/pipeline/plugins/ipythonx.py deleted file mode 100644 index 2ab583314d..0000000000 --- a/nipype/pipeline/plugins/ipythonx.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Parallel workflow execution via IPython controller -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -import sys -from future.utils import raise_from - -from ... import LooseVersion -from .base import (DistributedPluginBase, logger, report_crash) - -IPython_not_loaded = False -try: - from IPython import __version__ as IPyversion - if LooseVersion(IPyversion) < LooseVersion('0.11'): - from IPython.kernel.contexts import ConnectionRefusedError -except ImportError: - IPython_not_loaded = True - - -class IPythonXPlugin(DistributedPluginBase): - """Execute workflow with ipython - """ - - def __init__(self, plugin_args=None): - if LooseVersion(IPyversion) > LooseVersion('0.10.1'): - raise EnvironmentError(('The IPythonX plugin can only be used with' - ' older IPython versions. 
Please use the ' - 'IPython plugin instead.')) - DeprecationWarning('This plugin will be deprecated as of version 0.13') - if IPython_not_loaded: - raise ImportError('ipyparallel could not be imported') - super(IPythonXPlugin, self).__init__(plugin_args=plugin_args) - self.ipyclient = None - self.taskclient = None - - def run(self, graph, config, updatehash=False): - """Executes a pre-defined pipeline is distributed approaches - based on IPython's ipyparallel processing interface - """ - # retrieve clients again - try: - name = 'IPython.kernel.client' - __import__(name) - self.ipyclient = sys.modules[name] - except ImportError as e: - raise_from( - ImportError("Ipython kernel not found. Parallel " - "execution will be unavailable"), e) - try: - self.taskclient = self.ipyclient.TaskClient() - except Exception as e: - if isinstance(e, ConnectionRefusedError): - raise_from(Exception("No IPython clients found."), e) - if isinstance(e, ValueError): - raise_from(Exception("Ipython kernel not installed"), e) - return super(IPythonXPlugin, self).run( - graph, config, updatehash=updatehash) - - def _get_result(self, taskid): - return self.taskclient.get_task_result(taskid, block=False) - - def _submit_job(self, node, updatehash=False): - cmdstr = """import sys -from traceback import format_exception -traceback=None -result=None -try: - result = task.run(updatehash=updatehash) -except: - etype, eval, etr = sys.exc_info() - traceback = format_exception(etype,eval,etr) - result = task.result -""" - task = self.ipyclient.StringTask( - cmdstr, - push=dict(task=node, updatehash=updatehash), - pull=['result', 'traceback']) - return self.taskclient.run(task, block=False) - - def _report_crash(self, node, result=None): - if result and result['traceback']: - node._result = result['result'] - node._traceback = result['traceback'] - return report_crash(node, traceback=result['traceback']) - else: - return report_crash(node) - - def _clear_task(self, taskid): - if IPyversion >= '0.10.1': - logger.debug("Clearing id: %d" % taskid) - self.taskclient.clear(taskid) diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index ff337dc973..7eafb8d33c 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -427,6 +427,8 @@ def copyfile(originalfile, hashfn = hash_timestamp elif hashmethod == 'content': hashfn = hash_infile + else: + raise AttributeError("Unknown hash method found:", hashmethod) newhash = hashfn(newfile) fmlogger.debug('File: %s already exists,%s, copy:%d', newfile, newhash, copy) @@ -622,16 +624,6 @@ def load_json(filename): def loadcrash(infile, *args): if '.pkl' in infile: return loadpkl(infile) - elif '.npz' in infile: - DeprecationWarning(('npz files will be deprecated in the next ' - 'release. 
you can use numpy to open them.')) - data = np.load(infile) - out = {} - for k in data.files: - out[k] = [f for f in data[k].flat] - if len(out[k]) == 1: - out[k] = out[k].pop() - return out else: raise ValueError('Only pickled crashfiles are supported') diff --git a/nipype/utils/tests/test_cmd.py b/nipype/utils/tests/test_cmd.py index 5cc704c51d..0e16e0aad8 100644 --- a/nipype/utils/tests/test_cmd.py +++ b/nipype/utils/tests/test_cmd.py @@ -88,54 +88,6 @@ def test_list_nipy_interfacesp(self): \tComputeMask \tEstimateContrast \tFitGLM -\tFmriRealign4d \tSimilarity \tSpaceTimeRealigner """ - - def test_run_4d_realign_without_arguments(self): - with pytest.raises(SystemExit) as cm: - with capture_sys_output() as (stdout, stderr): - nipype_cmd.main( - ['nipype_cmd', 'nipype.interfaces.nipy', 'FmriRealign4d']) - - exit_exception = cm.value - assert exit_exception.code == 2 - - error_message = """usage: nipype_cmd nipype.interfaces.nipy FmriRealign4d [-h] - [--between_loops [BETWEEN_LOOPS [BETWEEN_LOOPS ...]]] - [--ignore_exception] - [--loops [LOOPS [LOOPS ...]]] - [--slice_order SLICE_ORDER] - [--speedup [SPEEDUP [SPEEDUP ...]]] - [--start START] - [--time_interp TIME_INTERP] - [--tr_slices TR_SLICES] - in_file [in_file ...] - tr""" - - if not PY2: - error_message += """ -nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: the following arguments are required: in_file, tr -""" - else: - error_message += """ -nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: too few arguments -""" - - assert stderr.getvalue() == error_message - assert stdout.getvalue() == '' - - def test_run_4d_realign_help(self): - with pytest.raises(SystemExit) as cm: - with capture_sys_output() as (stdout, stderr): - nipype_cmd.main([ - 'nipype_cmd', 'nipype.interfaces.nipy', 'FmriRealign4d', - '-h' - ]) - - exit_exception = cm.value - assert exit_exception.code == 0 - - assert stderr.getvalue() == '' - assert "Run FmriRealign4d" in stdout.getvalue()
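
The normalize_tpms hunk near the top of this patch replaces the mutable default argument ``out_files=[]`` with ``out_files=None`` plus an explicit guard. As a minimal, self-contained sketch of why that matters — the ``collect_*`` helpers below are hypothetical and not part of nipype — a list default is created once at function definition time and shared by every call, whereas the ``None`` sentinel used in the patch gives each call its own fresh list:

    # Hypothetical helpers (not in nipype) illustrating the pitfall the
    # out_files=None change avoids.

    def collect_bad(item, bucket=[]):
        # the same list object is reused on every call
        bucket.append(item)
        return bucket

    def collect_good(item, bucket=None):
        # the None sentinel gives each call a fresh list, mirroring the
        # "if out_files is None: out_files = []" guard added in this patch
        if bucket is None:
            bucket = []
        bucket.append(item)
        return bucket

    print(collect_bad('a'))   # ['a']
    print(collect_bad('b'))   # ['a', 'b']  <- state leaks between calls
    print(collect_good('a'))  # ['a']
    print(collect_good('b'))  # ['b']

In the patch itself, the ``if out_files is None: out_files = []`` guard keeps the original behaviour of normalize_tpms while removing the shared-state pitfall.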