diff --git a/CHANGES b/CHANGES
index 01a09b735a..f0dcfa59dc 100644
--- a/CHANGES
+++ b/CHANGES
@@ -2,6 +2,8 @@ Upcoming release (0.14.1)
 ================
 * MAINT: Cleaning / simplify ``Node`` (https://github.com/nipy/nipype/pull/2325)
+* STY: Cleanup of PEP8 violations (https://github.com/nipy/nipype/pull/2358)
+* STY: Cleanup of trailing spaces and adding of missing newlines at end of files (https://github.com/nipy/nipype/pull/2355)

 0.14.0 (November 29, 2017)
 ==========================
diff --git a/build_docs.py b/build_docs.py
index e2ae75b7b9..c39a8da473 100644
--- a/build_docs.py
+++ b/build_docs.py
@@ -8,7 +8,8 @@
 python setup.py build_sphinx
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open, str

 # Standard library imports
@@ -28,7 +29,7 @@
 DOC_BUILD_DIR = os.path.join('doc', '_build', 'html')
 DOC_DOCTREES_DIR = os.path.join('doc', '_build', 'doctrees')

-################################################################################
+###############################################################################
 # Distutils Command class for installing nipype to a temporary location.
@@ -63,7 +64,7 @@ def finalize_options(self):
         pass

-################################################################################
+###############################################################################
 # Distutils Command class for API generation
 class APIDocs(TempInstall):
     description = \
@@ -93,7 +94,7 @@ def run(self):
         os.chdir('..')

-################################################################################
+###############################################################################
 # Code to copy the sphinx-generated html docs in the distribution.
 def relative_path(filename):
     """ Return the relative path to the file, assuming the file is
@@ -103,7 +104,7 @@ def relative_path(filename):
     return os.path.abspath(filename)[length:]

-################################################################################
+###############################################################################
 # Distutils Command class build the docs
 # Sphinx import.
 try:
@@ -165,7 +166,7 @@ def finalize_options(self):
             self.build_dir = os.path.join(*DOC_BUILD_DIR.split(os.sep)[:-1])
         BuildDoc.finalize_options(self)

-################################################################################
+###############################################################################
 # Distutils Command class to clean
diff --git a/doc/sphinxext/autosummary_generate.py b/doc/sphinxext/autosummary_generate.py
index d30edf6aec..658c50e4a4 100755
--- a/doc/sphinxext/autosummary_generate.py
+++ b/doc/sphinxext/autosummary_generate.py
@@ -39,9 +39,9 @@ def main():
                  help="Phantom import modules from a file")
     p.add_option("-o", "--output-dir", action="store", type="string",
                  dest="output_dir", default=None,
-                 help=("Write all output files to the given directory (instead "
-                       "of writing them as specified in the autosummary:: "
-                       "directives)"))
+                 help=("Write all output files to the given directory "
+                       "(instead of writing them as specified in the "
+                       "autosummary:: directives)"))

     options, args = p.parse_args()
     if len(args) == 0:
@@ -161,9 +161,12 @@ def get_documented_in_lines(lines, module=None, filename=None):
     """
     title_underline_re = re.compile("^[-=*_^#]{3,}\s*$")
-    autodoc_re = re.compile(".. 
auto(function|method|attribute|class|exception|module)::\s*([A-Za-z0-9_.]+)\s*$") + autodoc_re = re.compile( + ".. auto(function|method|attribute|class|exception|module)::" + "\s*([A-Za-z0-9_.]+)\s*$") autosummary_re = re.compile(r'^\.\.\s+autosummary::\s*') - module_re = re.compile(r'^\.\.\s+(current)?module::\s*([a-zA-Z0-9_.]+)\s*$') + module_re = re.compile( + r'^\.\.\s+(current)?module::\s*([a-zA-Z0-9_.]+)\s*$') autosummary_item_re = re.compile(r'^\s+([_a-zA-Z][a-zA-Z0-9_.]*)\s*.*?') toctree_arg_re = re.compile(r'^\s+:toctree:\s*(.*?)\s*$') @@ -189,7 +192,8 @@ def get_documented_in_lines(lines, module=None, filename=None): m = autosummary_item_re.match(line) if m: name = m.group(1).strip() - if current_module and not name.startswith(current_module + '.'): + if current_module and not name.startswith( + current_module + '.'): name = "%s.%s" % (current_module, name) documented.setdefault(name, []).append( (filename, current_title, 'autosummary', toctree)) @@ -210,7 +214,8 @@ def get_documented_in_lines(lines, module=None, filename=None): current_module = name documented.update(get_documented_in_docstring( name, filename=filename)) - elif current_module and not name.startswith(current_module + '.'): + elif current_module and not name.startswith( + current_module + '.'): name = "%s.%s" % (current_module, name) documented.setdefault(name, []).append( (filename, current_title, "auto" + m.group(1), None)) @@ -230,5 +235,6 @@ def get_documented_in_lines(lines, module=None, filename=None): return documented + if __name__ == "__main__": main() diff --git a/doc/sphinxext/ipython_console_highlighting.py b/doc/sphinxext/ipython_console_highlighting.py index 6720056dc8..a400d3c9c1 100644 --- a/doc/sphinxext/ipython_console_highlighting.py +++ b/doc/sphinxext/ipython_console_highlighting.py @@ -59,7 +59,6 @@ class IPythonConsoleLexer(Lexer): def get_tokens_unprocessed(self, text): pylexer = PythonLexer(**self.options) - tblexer = PythonTracebackLexer(**self.options) curcode = '' insertions = [] @@ -72,21 +71,22 @@ def get_tokens_unprocessed(self, text): insertions.append((len(curcode), [(0, Comment, line)])) elif input_prompt is not None: - insertions.append((len(curcode), - [(0, Generic.Prompt, input_prompt.group())])) + insertions.append((len( + curcode), [(0, Generic.Prompt, input_prompt.group())])) curcode += line[input_prompt.end():] elif continue_prompt is not None: - insertions.append((len(curcode), - [(0, Generic.Prompt, continue_prompt.group())])) + insertions.append((len( + curcode), [(0, Generic.Prompt, continue_prompt.group())])) curcode += line[continue_prompt.end():] elif output_prompt is not None: - insertions.append((len(curcode), - [(0, Generic.Output, output_prompt.group())])) + insertions.append((len( + curcode), [(0, Generic.Output, output_prompt.group())])) curcode += line[output_prompt.end():] else: if curcode: for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): + pylexer.get_tokens_unprocessed( + curcode)): yield item curcode = '' insertions = [] diff --git a/doc/sphinxext/numpy_ext/docscrape.py b/doc/sphinxext/numpy_ext/docscrape.py index a4ff432715..28e3b30948 100644 --- a/doc/sphinxext/numpy_ext/docscrape.py +++ b/doc/sphinxext/numpy_ext/docscrape.py @@ -295,7 +295,8 @@ def _parse(self): for (section, content) in self._read_sections(): if not section.startswith('..'): - section = ' '.join([s.capitalize() for s in section.split(' ')]) + section = ' '.join([s.capitalize() + for s in section.split(' ')]) if section in ('Parameters', 'Returns', 
'Raises', 'Warns', 'Other Parameters', 'Attributes', 'Methods'): self[section] = self._parse_param_list(content) @@ -443,7 +444,7 @@ def __init__(self, func, role='func', doc=None, config={}): argspec = inspect.formatargspec(*argspec) argspec = argspec.replace('*', '\*') signature = '%s%s' % (func_name, argspec) - except TypeError as e: + except TypeError: signature = '%s()' % func_name self['Signature'] = signature @@ -459,7 +460,6 @@ def __str__(self): out = '' func, func_name = self.get_func() - signature = self['Signature'].replace('*', '\*') roles = {'func': 'function', 'meth': 'method'} diff --git a/doc/sphinxext/numpy_ext/numpydoc.py b/doc/sphinxext/numpy_ext/numpydoc.py index ccce7aad03..c21ada71e6 100644 --- a/doc/sphinxext/numpy_ext/numpydoc.py +++ b/doc/sphinxext/numpy_ext/numpydoc.py @@ -11,7 +11,7 @@ - Convert Parameters etc. sections to field lists. - Convert See Also section to a See also entry. - Renumber references. -- Extract the signature from the docstring, if it can't be determined otherwise. +- Extract signature from docstring, if it can't be determined otherwise. .. [1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt diff --git a/examples/dmri_dtk_dti.py b/examples/dmri_dtk_dti.py index 4a5e2676cf..dd8bff7449 100755 --- a/examples/dmri_dtk_dti.py +++ b/examples/dmri_dtk_dti.py @@ -13,9 +13,9 @@ More details can be found at http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/fdt/index.htm -In order to run this tutorial you need to have Diffusion Toolkit and FSL tools installed and -accessible from matlab/command line. Check by calling fslinfo and dtk from the command -line. +In order to run this tutorial you need to have Diffusion Toolkit and FSL tools +installed and accessible from matlab/command line. Check by calling fslinfo and +dtk from the command line. Tell python where to find the appropriate functions. """ @@ -51,8 +51,8 @@ The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``dwis1`` and ``dwis2``. Each subject directory -contains each of the following files: bvec, bval, diffusion weighted data, a set of target masks, -a seed file, and a transformation matrix. +contains each of the following files: bvec, bval, diffusion weighted data, a +set of target masks, a seed file, and a transformation matrix. Below we set some variables to inform the ``datasource`` about the layout of our data. We specify the location of the data, the subject @@ -189,12 +189,13 @@ def getstripdir(subject_id): - return os.path.join(os.path.abspath('data/workingdir/dwiproc'), '_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('data/workingdir/dwiproc'), + '_subject_id_%s' % subject_id) """ -Setup the pipeline that combines the two workflows: tractography and computeTensor ----------------------------------------------------------------------------------- +Setup the pipeline that combines the 2 workflows: tractography & computeTensor +------------------------------------------------------------------------------ """ dwiproc = pe.Workflow(name="dwiproc") diff --git a/examples/dmri_dtk_odf.py b/examples/dmri_dtk_odf.py index b4fb978dd0..0bb4983f73 100755 --- a/examples/dmri_dtk_odf.py +++ b/examples/dmri_dtk_odf.py @@ -13,9 +13,9 @@ More details can be found at http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/fdt/index.htm -In order to run this tutorial you need to have Diffusion Toolkit and FSL tools installed and -accessible from matlab/command line. 
Check by calling fslinfo and dtk from the command -line. +In order to run this tutorial you need to have Diffusion Toolkit and FSL tools +installed and accessible from matlab/command line. Check by calling fslinfo and +dtk from the command line. Tell python where to find the appropriate functions. """ @@ -51,8 +51,8 @@ The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``dwis1`` and ``dwis2``. Each subject directory -contains each of the following files: bvec, bval, diffusion weighted data, a set of target masks, -a seed file, and a transformation matrix. +contains each of the following files: bvec, bval, diffusion weighted data, a +set of target masks, a seed file, and a transformation matrix. Below we set some variables to inform the ``datasource`` about the layout of our data. We specify the location of the data, the subject @@ -154,7 +154,8 @@ compute_ODF.connect([ (fslroi, bet, [('roi_file', 'in_file')]), (eddycorrect, odf_recon, [('outputnode.eddy_corrected', 'DWI')]), - (eddycorrect, hardi_mat, [('outputnode.eddy_corrected', 'reference_file')]), + (eddycorrect, hardi_mat, [('outputnode.eddy_corrected', + 'reference_file')]), (hardi_mat, odf_recon, [('out_file', 'matrix')]) ]) @@ -181,8 +182,8 @@ """ -Setup the pipeline that combines the two workflows: tractography and compute_ODF ----------------------------------------------------------------------------------- +Setup the pipeline that combines the 2 workflows: tractography and compute_ODF +------------------------------------------------------------------------------ """ dwiproc = pe.Workflow(name="dwiproc") diff --git a/examples/dmri_fsl_dti.py b/examples/dmri_fsl_dti.py index 05891a8727..b0a3bef406 100755 --- a/examples/dmri_fsl_dti.py +++ b/examples/dmri_fsl_dti.py @@ -221,9 +221,12 @@ """ tractography.add_nodes([bedpostx, flirt]) -tractography.connect([(bedpostx, probtrackx, [('outputnode.thsamples', 'thsamples'), - ('outputnode.phsamples', 'phsamples'), - ('outputnode.fsamples', 'fsamples') +tractography.connect([(bedpostx, probtrackx, [('outputnode.thsamples', + 'thsamples'), + ('outputnode.phsamples', + 'phsamples'), + ('outputnode.fsamples', + 'fsamples') ]), (probtrackx, findthebiggest, [('targets', 'in_files')]), (flirt, probtrackx, [('out_matrix_file', 'xfm')]) @@ -239,13 +242,14 @@ def getstripdir(subject_id): - import os - return os.path.join(os.path.abspath('data/workingdir/dwiproc'), '_subject_id_%s' % subject_id) + import os + return os.path.join(os.path.abspath('data/workingdir/dwiproc'), + '_subject_id_%s' % subject_id) """ -Setup the pipeline that combines the two workflows: tractography and computeTensor ----------------------------------------------------------------------------------- +Setup the pipeline that combines the 2 workflows: tractography & computeTensor +------------------------------------------------------------------------------ """ dwiproc = pe.Workflow(name="dwiproc") @@ -261,15 +265,18 @@ def getstripdir(subject_id): ('seed_file', 'probtrackx.seed'), ('target_masks', 'probtrackx.target_masks') ]), - (computeTensor, tractography, [('eddycorrect.outputnode.eddy_corrected', 'bedpostx.inputnode.dwi'), - ('bet.mask_file', 'bedpostx.inputnode.mask'), + (computeTensor, tractography, [('eddycorrect.outputnode.eddy_corrected', + 'bedpostx.inputnode.dwi'), + ('bet.mask_file', + 'bedpostx.inputnode.mask'), ('bet.mask_file', 'probtrackx.mask'), ('fslroi.roi_file', 'flirt.reference')]), (infosource, datasink, [('subject_id', 'container'), (('subject_id', 
getstripdir), 'strip_dir')]), - (tractography, datasink, [('findthebiggest.out_file', 'fbiggest.@biggestsegmentation')]) + (tractography, datasink, [ + ('findthebiggest.out_file', 'fbiggest.@biggestsegmentation')]) ]) if __name__ == '__main__': - dwiproc.run() - dwiproc.write_graph() + dwiproc.run() + dwiproc.write_graph() diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py index d814e4facc..d713d0ef85 100644 --- a/examples/dmri_preprocessing.py +++ b/examples/dmri_preprocessing.py @@ -81,7 +81,8 @@ """ datasource = pe.Node(nio.DataGrabber(infields=['subject_id'], - outfields=list(info.keys())), name='datasource') + outfields=list(info.keys())), + name='datasource') datasource.inputs.template = "%s/%s" @@ -100,7 +101,8 @@ """ inputnode = pe.Node(niu.IdentityInterface(fields=["dwi", "bvecs", "bvals", - "dwi_rev"]), name="inputnode") + "dwi_rev"]), + name="inputnode") """ diff --git a/examples/fmri_ants_openfmri.py b/examples/fmri_ants_openfmri.py index ee6ddee3f9..c30bc0f00a 100755 --- a/examples/fmri_ants_openfmri.py +++ b/examples/fmri_ants_openfmri.py @@ -203,8 +203,6 @@ def create_reg_workflow(name='registration'): Concatenate the affine and ants transforms into a list """ - pickfirst = lambda x: x[0] - merge = pe.Node(niu.Merge(2), iterfield=['in2'], name='mergexfm') register.connect(convert2itk, 'itk_transform', merge, 'in2') register.connect(reg, 'composite_transform', merge, 'in1') @@ -414,8 +412,6 @@ def create_fs_reg_workflow(name='registration'): Concatenate the affine and ants transforms into a list """ - pickfirst = lambda x: x[0] - merge = Node(Merge(2), iterfield=['in2'], name='mergexfm') register.connect(convert2itk, 'itk_transform', merge, 'in2') register.connect(reg, 'composite_transform', merge, 'in1') diff --git a/examples/fmri_fsl_reuse.py b/examples/fmri_fsl_reuse.py index beb09f0345..41bb2c4ee9 100755 --- a/examples/fmri_fsl_reuse.py +++ b/examples/fmri_fsl_reuse.py @@ -20,13 +20,13 @@ from builtins import str from builtins import range -import os # system functions -import nipype.interfaces.io as nio # Data i/o -import nipype.interfaces.fsl as fsl # fsl -from nipype.interfaces import utility as niu # Utilities -import nipype.pipeline.engine as pe # pypeline engine -import nipype.algorithms.modelgen as model # model generation -import nipype.algorithms.rapidart as ra # artifact detection +import os # system functions +import nipype.interfaces.io as nio # Data i/o +import nipype.interfaces.fsl as fsl # fsl +from nipype.interfaces import utility as niu # Utilities +import nipype.pipeline.engine as pe # pypeline engine +import nipype.algorithms.modelgen as model # model generation +import nipype.algorithms.rapidart as ra # artifact detection from nipype.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, @@ -57,11 +57,11 @@ """ art = pe.MapNode(ra.ArtifactDetect(use_differences=[True, False], - use_norm=True, - norm_threshold=1, - zintensity_threshold=3, - parameter_source='FSL', - mask_type='file'), + use_norm=True, + norm_threshold=1, + zintensity_threshold=3, + parameter_source='FSL', + mask_type='file'), iterfield=['realigned_files', 'realignment_parameters', 'mask_file'], name="art") @@ -170,7 +170,7 @@ def num_copes(files): """ datasource = pe.Node(nio.DataGrabber(infields=['subject_id'], - outfields=['func', 'struct']), + outfields=['func', 'struct']), name='datasource') datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info diff --git 
a/examples/fmri_slicer_coregistration.py b/examples/fmri_slicer_coregistration.py index e0129651dd..5c2e866264 100755 --- a/examples/fmri_slicer_coregistration.py +++ b/examples/fmri_slicer_coregistration.py @@ -111,7 +111,8 @@ pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]), (datasource, coregister, [('func', 'movingVolume')]), (datasource, coregister, [('struct', 'fixedVolume')]), - (coregister, reslice, [('outputTransform', 'warpTransform')]), + (coregister, reslice, [('outputTransform', + 'warpTransform')]), (datasource, reslice, [('func', 'inputVolume')]), (datasource, reslice, [('struct', 'referenceVolume')]) ]) diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 33c03f8bf0..caf044d47e 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -23,7 +23,6 @@ import nipype.interfaces.spm as spm # spm import nipype.interfaces.fsl as fsl # fsl import nipype.interfaces.matlab as mlab # how to run matlab -import nipype.interfaces.fsl as fsl # fsl import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.modelgen as model # model specification diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py index 746da9f810..cce483eb95 100755 --- a/examples/fmri_spm_dartel.py +++ b/examples/fmri_spm_dartel.py @@ -19,15 +19,15 @@ from builtins import str from builtins import range -import nipype.interfaces.io as nio # Data i/o -import nipype.interfaces.spm as spm # spm -import nipype.workflows.fmri.spm as spm_wf # spm -import nipype.interfaces.fsl as fsl # fsl -from nipype.interfaces import utility as niu # Utilities -import nipype.pipeline.engine as pe # pypeline engine -import nipype.algorithms.rapidart as ra # artifact detection -import nipype.algorithms.modelgen as model # model specification -import os # system functions +import nipype.interfaces.io as nio # Data i/o +import nipype.interfaces.spm as spm # spm +import nipype.workflows.fmri.spm as spm_wf # spm +import nipype.interfaces.fsl as fsl # fsl +from nipype.interfaces import utility as niu # Utilities +import nipype.pipeline.engine as pe # pypeline engine +import nipype.algorithms.rapidart as ra # artifact detection +import nipype.algorithms.modelgen as model # model specification +import os # system functions """ @@ -263,7 +263,7 @@ inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']), name='inputnode') datasource = pe.Node(nio.DataGrabber(infields=['subject_id'], - outfields=['func', 'struct']), + outfields=['func', 'struct']), name='datasource') datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info @@ -273,7 +273,7 @@ """ datasource_dartel = pe.MapNode(nio.DataGrabber(infields=['subject_id'], - outfields=['struct']), + outfields=['struct']), name='datasource_dartel', iterfield=['subject_id']) datasource_dartel.inputs.template = 'nipype-tutorial/data/%s/%s.nii' @@ -308,9 +308,9 @@ def pickFieldFlow(dartel_flow_fields, subject_id): raise Exception pick_flow = pe.Node(niu.Function(input_names=['dartel_flow_fields', - 'subject_id'], - output_names=['dartel_flow_field'], - function=pickFieldFlow), + 'subject_id'], + output_names=['dartel_flow_field'], + function=pickFieldFlow), name="pick_flow") """ diff --git a/examples/fmri_spm_nested.py b/examples/fmri_spm_nested.py index 9f1b51469d..2c0b295c8c 100755 --- a/examples/fmri_spm_nested.py +++ b/examples/fmri_spm_nested.py @@ -61,6 +61,8 @@ 
----------------------------------------------- """ + + def _template_path(in_data): import os.path as op return op.abspath(op.join(in_data, 'nipype-tutorial/data/T1.nii')) diff --git a/examples/frontiers_paper/smoothing_comparison.py b/examples/frontiers_paper/smoothing_comparison.py index 11698c0379..d2d5d738f5 100644 --- a/examples/frontiers_paper/smoothing_comparison.py +++ b/examples/frontiers_paper/smoothing_comparison.py @@ -26,8 +26,9 @@ name="iter_fwhm") iter_fwhm.iterables = [('fwhm', [4, 8])] -iter_smoothing_method = pe.Node(interface=util.IdentityInterface(fields=["smoothing_method"]), - name="iter_smoothing_method") +iter_smoothing_method = pe.Node(interface=util.IdentityInterface( + fields=["smoothing_method"]), + name="iter_smoothing_method") iter_smoothing_method.iterables = [('smoothing_method', ['isotropic_voxel', 'anisotropic_voxel', 'isotropic_surface'])] @@ -44,8 +45,9 @@ compute_mask = pe.Node(interface=nipy.ComputeMask(), name="compute_mask") preprocessing.connect(realign, "mean_image", compute_mask, "mean_volume") -anisotropic_voxel_smooth = fsl_wf.create_susan_smooth(name="anisotropic_voxel_smooth", - separate_masks=False) +anisotropic_voxel_smooth = fsl_wf.create_susan_smooth( + name="anisotropic_voxel_smooth", + separate_masks=False) anisotropic_voxel_smooth.inputs.smooth.output_type = 'NIFTI' preprocessing.connect(realign, "realigned_files", anisotropic_voxel_smooth, "inputnode.in_files") @@ -64,9 +66,10 @@ preprocessing.connect(recon_all, 'subject_id', surfregister, 'subject_id') preprocessing.connect(recon_all, 'subjects_dir', surfregister, 'subjects_dir') -isotropic_surface_smooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)), - iterfield=['in_file'], - name="isotropic_surface_smooth") +isotropic_surface_smooth = pe.MapNode( + interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)), + iterfield=['in_file'], + name="isotropic_surface_smooth") preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth, 'reg_file') preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth, @@ -93,8 +96,10 @@ def chooseindex(roi): - return {'isotropic_voxel': list(range(0, 4)), 'anisotropic_voxel': list(range(4, 8)), - 'isotropic_surface': list(range(8, 12))}[roi] + return {'isotropic_voxel': list(range(0, 4)), + 'anisotropic_voxel': list(range(4, 8)), + 'isotropic_surface': list(range(8, 12))}[roi] + preprocessing.connect(iter_smoothing_method, ("smoothing_method", chooseindex), select_smoothed_files, 'index') @@ -109,10 +114,11 @@ def chooseindex(roi): specify_model.inputs.input_units = 'secs' specify_model.inputs.time_repetition = 3. 
specify_model.inputs.high_pass_filter_cutoff = 120 -specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'], - onsets=[list(range(15, 240, 60)), - list(range(45, 240, 60))], - durations=[[15], [15]])] * 4 +specify_model.inputs.subject_info = [ + Bunch(conditions=['Task-Odd', 'Task-Even'], + onsets=[list(range(15, 240, 60)), + list(range(45, 240, 60))], + durations=[[15], [15]])] * 4 level1design = pe.Node(interface=spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} @@ -132,7 +138,8 @@ def chooseindex(roi): modelling.connect(level1design, 'spm_mat_file', level1estimate, 'spm_mat_file') modelling.connect(level1estimate, 'spm_mat_file', contrastestimate, 'spm_mat_file') -modelling.connect(level1estimate, 'beta_images', contrastestimate, 'beta_images') +modelling.connect(level1estimate, 'beta_images', + contrastestimate, 'beta_images') modelling.connect(level1estimate, 'residual_image', contrastestimate, 'residual_image') @@ -150,9 +157,10 @@ def chooseindex(roi): name='datasource') datasource.inputs.base_directory = os.path.abspath('data') datasource.inputs.template = '%s/%s.nii' -datasource.inputs.template_args = info = dict(func=[['subject_id', - ['f3', 'f5', 'f7', 'f10']]], - struct=[['subject_id', 'struct']]) +datasource.inputs.template_args = info = dict( + func=[['subject_id', + ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) datasource.inputs.subject_id = 's1' datasource.inputs.sort_filelist = True @@ -161,7 +169,8 @@ def chooseindex(roi): 'recon_all.T1_files') datasink = pe.Node(interface=nio.DataSink(), name="datasink") -datasink.inputs.base_directory = os.path.abspath('smoothing_comparison_workflow/output') +datasink.inputs.base_directory = os.path.abspath( + 'smoothing_comparison_workflow/output') datasink.inputs.regexp_substitutions = [("_rename[0-9]", "")] main_workflow.connect(modelling, 'contrastestimate.spmT_images', datasink, diff --git a/examples/frontiers_paper/workflow_from_scratch.py b/examples/frontiers_paper/workflow_from_scratch.py index fd8ba8ffd3..5f57432820 100644 --- a/examples/frontiers_paper/workflow_from_scratch.py +++ b/examples/frontiers_paper/workflow_from_scratch.py @@ -67,10 +67,11 @@ specify_model.inputs.input_units = 'secs' specify_model.inputs.time_repetition = 3. specify_model.inputs.high_pass_filter_cutoff = 120 -specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'], - onsets=[list(range(15, 240, 60)), - list(range(45, 240, 60))], - durations=[[15], [15]])] * 4 +specify_model.inputs.subject_info = [ + Bunch(conditions=['Task-Odd', 'Task-Even'], + onsets=[list(range(15, 240, 60)), + list(range(45, 240, 60))], + durations=[[15], [15]])] * 4 level1design = pe.Node(interface=spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} @@ -91,7 +92,8 @@ modelling.connect(level1design, 'spm_mat_file', level1estimate, 'spm_mat_file') modelling.connect(level1estimate, 'spm_mat_file', contrastestimate, 'spm_mat_file') -modelling.connect(level1estimate, 'beta_images', contrastestimate, 'beta_images') +modelling.connect(level1estimate, 'beta_images', + contrastestimate, 'beta_images') modelling.connect(level1estimate, 'residual_image', contrastestimate, 'residual_image') @@ -116,7 +118,8 @@ parameterize it with subject ID. In this way we will be able to run it for different subjects. We can automate this by iterating over a list of subject Ids, by setting an iterables property on the subject_id input of DataGrabber. 
-Its output will be connected to realignment node from preprocessing workflow."""
+Its output will be connected to the realignment node of the preprocessing workflow.
+"""

 datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                outfields=['func']),
@@ -132,10 +135,12 @@
 """DataSink on the other side provides a means of storing selected results
 to a specified location. It supports automatic creation of folder structure and
-regular expression based substitutions. In this example we will store T maps."""
+regular expression based substitutions. In this example we will store T maps.
+"""

 datasink = pe.Node(interface=nio.DataSink(), name="datasink")
-datasink.inputs.base_directory = os.path.abspath('workflow_from_scratch/output')
+datasink.inputs.base_directory = os.path.abspath(
+    'workflow_from_scratch/output')

 main_workflow.connect(modelling, 'contrastestimate.spmT_images', datasink,
                       'contrasts.@T')
diff --git a/examples/howto_caching_example.py b/examples/howto_caching_example.py
index b5c548fc76..ae0a06b65b 100644
--- a/examples/howto_caching_example.py
+++ b/examples/howto_caching_example.py
@@ -35,7 +35,8 @@
 # Merge all these files along the time dimension
 out_merge = mem.cache(fsl.Merge)(dimension="t",
-                                 in_files=[t.outputs.out_file for t in threshold],
+                                 in_files=[
+                                     t.outputs.out_file for t in threshold],
                                  )
 # And finally compute the mean
 out_mean = mem.cache(fsl.MeanImage)(in_file=out_merge.outputs.merged_file)
diff --git a/examples/rsfmri_vol_surface_preprocessing_nipy.py b/examples/rsfmri_vol_surface_preprocessing_nipy.py
index 51a5742284..94c4a76d04 100644
--- a/examples/rsfmri_vol_surface_preprocessing_nipy.py
+++ b/examples/rsfmri_vol_surface_preprocessing_nipy.py
@@ -232,6 +232,7 @@ def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None):
         out_files.append(filename)
     return out_files

+
 def rename(in_files, suffix=None):
     from nipype.utils.filemanip import (filename_to_list, split_filename,
                                         list_to_filename)
diff --git a/examples/smri_ants_registration.py b/examples/smri_ants_registration.py
index cda3e49549..cef7dc0593 100644
--- a/examples/smri_ants_registration.py
+++ b/examples/smri_ants_registration.py
@@ -39,8 +39,10 @@
 print(mydatadir)

 MyFileURLs = [
-    ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
-    ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13121',
+     '01_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13122',
+     '02_T1_half.nii.gz'),
 ]
 for tt in MyFileURLs:
     myURL = tt[0]
@@ -67,7 +69,8 @@

 """

-reg = Registration(from_file=example_data('smri_ants_registration_settings.json'))
+reg = Registration(from_file=example_data(
+    'smri_ants_registration_settings.json'))
 reg.inputs.fixed_image = input_images[0]
 reg.inputs.moving_image = input_images[1]
diff --git a/examples/smri_cbs_skullstripping.py b/examples/smri_cbs_skullstripping.py
index 16fe3de000..f87e822096 100644
--- a/examples/smri_cbs_skullstripping.py
+++ b/examples/smri_cbs_skullstripping.py
@@ -6,19 +6,22 @@
 sMRI: Using CBS Tools for skullstripping
 ========================================

-This simple workflow uses SPECTRE2010 algorithm to skullstrip an MP2RAGE anatomical scan.
+This simple workflow uses the SPECTRE2010 algorithm to skullstrip an MP2RAGE
+anatomical scan.
""" import nipype.pipeline.engine as pe -from nipype.interfaces.mipav.developer import JistIntensityMp2rageMasking, MedicAlgorithmSPECTRE2010 +from nipype.interfaces.mipav.developer import (JistIntensityMp2rageMasking, + MedicAlgorithmSPECTRE2010) wf = pe.Workflow("skullstripping") mask = pe.Node(JistIntensityMp2rageMasking(), name="masking") -mask.inputs.inSecond = "/Users/filo/7t_trt/niftis/sub001/session_1/MP2RAGE_INV2.nii.gz" -mask.inputs.inQuantitative = "/Users/filo/7t_trt/niftis/sub001/session_1/MP2RAGE_UNI.nii.gz" -mask.inputs.inT1weighted = "/Users/filo/7t_trt/niftis/sub001/session_1/MP2RAGE_T1.nii.gz" +folder_path = '/Users/filo/7t_trt/niftis/sub001/session_1/' +mask.inputs.inSecond = folder_path + "MP2RAGE_INV2.nii.gz" +mask.inputs.inQuantitative = folder_path + "MP2RAGE_UNI.nii.gz" +mask.inputs.inT1weighted = folder_path + "MP2RAGE_T1.nii.gz" mask.inputs.outMasked = True mask.inputs.outMasked2 = True mask.inputs.outSignal = True diff --git a/examples/smri_fsreconall.py b/examples/smri_fsreconall.py index 970d4a513a..9da027720f 100644 --- a/examples/smri_fsreconall.py +++ b/examples/smri_fsreconall.py @@ -59,7 +59,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct']), - name='datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']]) @@ -88,6 +88,7 @@ name="average") average.inputs.subjects_dir = subjects_dir -wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, 'subjects_ids') +wf.connect(recon_all, 'postdatasink_outputspec.subject_id', + average, 'subjects_ids') wf.run("MultiProc", plugin_args={'n_procs': 4}) diff --git a/examples/test_spm.py b/examples/test_spm.py index 1242102f1b..dd769dffd1 100644 --- a/examples/test_spm.py +++ b/examples/test_spm.py @@ -13,6 +13,7 @@ def _get_first(inlist): return inlist[0] return inlist + def test_spm(name='test_spm_3d'): """ A simple workflow to test SPM's installation. 
By default will split the 4D volume in
@@ -32,7 +33,6 @@ def test_spm(name='test_spm_3d'):
     realign_estwrite = pe.Node(spm.Realign(jobtype='estwrite'),
                                name='realign_estwrite')
     smooth = pe.Node(spm.Smooth(fwhm=[6, 6, 6]), name='smooth')
-
     if name == 'test_spm_3d':
         split = pe.Node(fsl.Split(dimension="t", output_type="NIFTI"),
                         name="split")
         workflow.connect([
diff --git a/nipype/__init__.py b/nipype/__init__.py
index f761a8ef09..7ef46c9618 100644
--- a/nipype/__init__.py
+++ b/nipype/__init__.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 import os
 from distutils.version import LooseVersion
@@ -30,13 +31,14 @@ def __call__(self, doctests=True):
         try:
             import pytest
         except:
-            raise RuntimeError('py.test not installed, run: pip install pytest')
+            raise RuntimeError(
+                'py.test not installed, run: pip install pytest')
         params = {'args': []}
         if doctests:
             params['args'].append('--doctest-modules')
         nipype_path = os.path.dirname(__file__)
-        params['args'].extend(['-x', '--ignore={}/external'.format(nipype_path),
-                               nipype_path])
+        params['args'].extend(
+            ['-x', '--ignore={}/external'.format(nipype_path), nipype_path])
         pytest.main(**params)

 test = NipypeTester()
diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py
index 4bb0ddeac6..aa730350bd 100644
--- a/nipype/algorithms/confounds.py
+++ b/nipype/algorithms/confounds.py
@@ -11,7 +11,8 @@
 >>> os.chdir(datadir)
 '''
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import range

 import os
@@ -53,17 +54,17 @@ class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
                          desc='output figure size')
     figformat = traits.Enum('png', 'pdf', 'svg', usedefault=True,
                             desc='output format for figures')
-    intensity_normalization = traits.Float(1000.0, usedefault=True,
-                                           desc='Divide value in each voxel at each timepoint '
-                                                'by the median calculated across all voxels'
-                                                'and timepoints within the mask (if specified)'
-                                                'and then multiply by the value specified by'
-                                                'this parameter. By using the default (1000)' \
-                                                'output DVARS will be expressed in ' \
-                                                'x10 % BOLD units compatible with Power et al.' \
-                                                '2012. Set this to 0 to disable intensity' \
-                                                'normalization altogether.')
-
+    intensity_normalization = traits.Float(
+        1000.0, usedefault=True,
+        desc='Divide value in each voxel at each timepoint '
+             'by the median calculated across all voxels '
+             'and timepoints within the mask (if specified) '
+             'and then multiply by the value specified by '
+             'this parameter. By using the default (1000) '
+             'output DVARS will be expressed in '
+             'x10 % BOLD units compatible with Power et al. '
+             '2012. Set this to 0 to disable intensity '
+             'normalization altogether.')

 class ComputeDVARSOutputSpec(TraitedSpec):
@@ -161,8 +162,8 @@ def _run_interface(self, runtime):
             fig = plot_confound(dvars[0], self.inputs.figsize,
                                 'Standardized DVARS', series_tr=tr)
             fig.savefig(self._results['fig_std'], dpi=float(self.inputs.figdpi),
-                    format=self.inputs.figformat,
-                    bbox_inches='tight')
+                        format=self.inputs.figformat,
+                        bbox_inches='tight')
             fig.clf()

         if self.inputs.save_nstd:
@@ -324,7 +325,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
                              'first is the default.'))
     components_file = traits.Str('components_file.txt', usedefault=True,
                                  desc='Filename to store physiological components')
-    num_components = traits.Int(6, usedefault=True) # 6 for BOLD, 4 for ASL
+    num_components = traits.Int(6, usedefault=True)  # 6 for BOLD, 4 for ASL
     pre_filter = traits.Enum('polynomial', 'cosine', False, usedefault=True,
                              desc='Detrend time series prior to component '
                                   'extraction')
@@ -403,9 +404,9 @@ class CompCor(BaseInterface):
             "author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.},"
             "year = {2007},"
             "pages = {90-101},}"
-            ),
+        ),
         'tags': ['method', 'implementation']
-        }]
+    }]

     def __init__(self, *args, **kwargs):
         ''' exactly the same as compcor except the header '''
@@ -591,8 +592,8 @@ def _process_masks(self, mask_images, timeseries=None):
             imgseries = timeseries[mask, :]
             imgseries = regress_poly(2, imgseries)[0]
             tSTD = _compute_tSTD(imgseries, 0, axis=-1)
-            threshold_std = np.percentile(tSTD, np.round(100. *
-                (1. - self.inputs.percentile_threshold)).astype(int))
+            threshold_std = np.percentile(tSTD, np.round(
+                100. * (1. - self.inputs.percentile_threshold)).astype(int))
             mask_data = np.zeros_like(mask)
             mask_data[mask != 0] = tSTD >= threshold_std
             out_image = nb.Nifti1Image(mask_data, affine=img.affine,
@@ -710,7 +711,7 @@ class NonSteadyStateDetector(BaseInterface):

     def _run_interface(self, runtime):
         in_nii = nb.load(self.inputs.in_file)
-        global_signal = in_nii.get_data()[:,:,:,:50].mean(axis=0).mean(axis=0).mean(axis=0)
+        global_signal = in_nii.get_data()[:, :, :, :50].mean(axis=0).mean(axis=0).mean(axis=0)

         self._results = {
             'n_volumes_to_discard': is_outlier(global_signal)
@@ -929,7 +930,7 @@ def regress_poly(degree, data, remove_mean=True, axis=-1):
     data = data.reshape((-1, timepoints))

     # Generate design matrix
-    X = np.ones((timepoints, 1)) # quick way to calc degree 0
+    X = np.ones((timepoints, 1))  # quick way to calc degree 0
     for i in range(degree):
         polynomial_func = Legendre.basis(i + 1)
         value_array = np.linspace(-1, 1, timepoints)
@@ -943,7 +944,7 @@ def regress_poly(degree, data, remove_mean=True, axis=-1):
     # Estimation
     if remove_mean:
         datahat = X.dot(betas).T
-    else: # disregard the first layer of X, which is degree 0
+    else:  # disregard the first layer of X, which is degree 0
         datahat = X[:, 1:].dot(betas[1:, ...]).T

     regressed_data = data - datahat
diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py
index 0b7e2987d7..e3fef583b7 100644
--- a/nipype/algorithms/icc.py
+++ b/nipype/algorithms/icc.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import range
 import os
 import numpy as np
@@ -36,10 +37,14 @@ class ICC(BaseInterface):

     def _run_interface(self, runtime):
         maskdata = nb.load(self.inputs.mask).get_data()
-        maskdata = np.logical_not(np.logical_or(maskdata == 0, np.isnan(maskdata)))
-
-        session_datas = [[nb.load(fname, mmap=NUMPY_MMAP).get_data()[maskdata].reshape(-1, 1) for fname in sessions] for sessions in self.inputs.subjects_sessions]
-        list_of_sessions = [np.dstack(session_data) for session_data in session_datas]
+        maskdata = np.logical_not(
+            np.logical_or(maskdata == 0, np.isnan(maskdata)))
+
+        session_datas = [[nb.load(fname, mmap=NUMPY_MMAP).get_data()[
+            maskdata].reshape(-1, 1) for fname in sessions]
+            for sessions in self.inputs.subjects_sessions]
+        list_of_sessions = [np.dstack(session_data)
+                            for session_data in session_datas]
         all_data = np.hstack(list_of_sessions)
         icc = np.zeros(session_datas[0][0].shape)
         session_F = np.zeros(session_datas[0][0].shape)
@@ -123,7 +128,8 @@ def ICC_rep_anova(Y):
     SSR = SST - SSC - SSE
     MSR = SSR / dfr

-    # ICC(3,1) = (mean square subjeT - mean square error) / (mean square subjeT + (k-1)*-mean square error)
+    # ICC(3,1) = (mean square subject - mean square error) /
+    #            (mean square subject + (k-1) * mean square error)
     ICC = (MSR - MSE) / (MSR + dfc * MSE)

     e_var = MSE  # variance of error
diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py
index 18aa7bc864..8c00dd08eb 100644
--- a/nipype/algorithms/mesh.py
+++ b/nipype/algorithms/mesh.py
@@ -12,7 +12,8 @@
 >>> os.chdir(datadir)
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import zip, str, bytes

 import os.path as op
@@ -122,13 +123,15 @@ def _run_interface(self, runtime):
         mesh.points = newpoints
         w = tvtk.PolyDataWriter()
         VTKInfo.configure_input_data(w, mesh)
-        w.file_name = self._gen_fname(self.inputs.points, suffix='warped', ext='.vtk')
+        w.file_name = self._gen_fname(
+            self.inputs.points, suffix='warped', ext='.vtk')
         w.write()
         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['out_points'] = self._gen_fname(self.inputs.points, suffix='warped',
+        outputs['out_points'] = self._gen_fname(self.inputs.points,
+                                                suffix='warped',
                                                 ext='.vtk')
         return outputs
@@ -358,13 +361,15 @@ def _run_interface(self, runtime):
         warping /= opfield

         vtk1.point_data.vectors = warping
-        writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp))
+        writer = tvtk.PolyDataWriter(
+            file_name=op.abspath(self.inputs.out_warp))
         VTKInfo.configure_input_data(writer, vtk1)
         writer.write()

         vtk1.point_data.vectors = None
         vtk1.points = points1 + warping
-        writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file))
+        writer = tvtk.PolyDataWriter(
+            file_name=op.abspath(self.inputs.out_file))
         VTKInfo.configure_input_data(writer, vtk1)
         writer.write()
         return runtime
@@ -389,5 +394,5 @@ class P2PDistance(ComputeMeshWarp):

     def __init__(self, **inputs):
         super(P2PDistance, self).__init__(**inputs)
-        IFLOGGER.warn('This interface has been deprecated since 1.0, please use '
-                      'ComputeMeshWarp')
+        IFLOGGER.warn('This interface has been deprecated since 1.0, please '
+                      'use ComputeMeshWarp')
diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py
index 23963de679..b7fbef127f 100644
--- a/nipype/algorithms/metrics.py
+++ b/nipype/algorithms/metrics.py
@@ -12,7 +12,8 @@
 >>> os.chdir(datadir)
 '''
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import zip, range

 import os
@@ -298,8 +299,6 @@ def _run_interface(self, runtime):
             data1 = data1.astype(np.min_scalar_type(max1))
data2 = nii2.get_data().astype(np.min_scalar_type(max1)) data2[np.logical_or(data1 < 0, np.isnan(data1))] = 0 - max2 = data2.max() - maxlabel = max(max1, max2) if isdefined(self.inputs.mask_volume): maskdata = nb.load(self.inputs.mask_volume).get_data() @@ -630,7 +629,7 @@ class Similarity(BaseInterface): def __init__(self, **inputs): try: package_check('nipy') - except Exception as e: + except Exception: self._have_nipy = False super(Similarity, self).__init__(**inputs) diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index a16507bf36..6d1ed63f20 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) ''' -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, zip, range, open from future.utils import raise_from @@ -260,6 +261,7 @@ def _list_outputs(self): outputs['nifti_file'] = self._gen_output_file_name() return outputs + class GunzipInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True) @@ -410,10 +412,10 @@ def merge_csvs(in_list): for idx, in_file in enumerate(in_list): try: in_array = np.loadtxt(in_file, delimiter=',') - except ValueError as ex: + except ValueError: try: in_array = np.loadtxt(in_file, delimiter=',', skiprows=1) - except ValueError as ex: + except ValueError: with open(in_file, 'r') as first: header_line = first.readline() @@ -424,7 +426,7 @@ def merge_csvs(in_list): in_file, delimiter=',', skiprows=1, usecols=list(range(1, n_cols)) ) - except ValueError as ex: + except ValueError: in_array = np.loadtxt( in_file, delimiter=',', skiprows=1, usecols=list(range(1, n_cols - 1))) if idx == 0: @@ -770,7 +772,7 @@ def _run_interface(self, runtime): import pandas as pd except ImportError as e: raise_from(ImportError('This interface requires pandas ' - '(http://pandas.pydata.org/) to run.'), e) + '(http://pandas.pydata.org/) to run.'), e) try: import lockfile as pl @@ -1179,7 +1181,7 @@ def normalize_tpms(in_files, in_mask=None, out_files=[]): hdr['data_type'] = 16 hdr.set_data_dtype(np.float32) nb.save(nb.Nifti1Image(img_data.astype(np.float32), imgs[0].affine, - hdr), out_files[0]) + hdr), out_files[0]) return out_files[0] img_data = np.array([im.get_data() for im in imgs]).astype(np.float32) @@ -1204,7 +1206,7 @@ def normalize_tpms(in_files, in_mask=None, out_files=[]): hdr['data_type'] = 16 hdr.set_data_dtype('float32') nb.save(nb.Nifti1Image(probmap.astype(np.float32), imgs[i].affine, - hdr), out_file) + hdr), out_file) return out_files @@ -1375,15 +1377,19 @@ def merge_rois(in_files, in_idxs, in_ref, class CalculateMedianInputSpec(BaseInterfaceInputSpec): - in_files = InputMultiPath(File(exists=True, mandatory=True, - desc="One or more realigned Nifti 4D timeseries")) + in_files = InputMultiPath(File( + exists=True, mandatory=True, + desc="One or more realigned Nifti 4D timeseries")) median_file = traits.Str(desc="Filename prefix to store median images") - median_per_file = traits.Bool(False, usedefault=True, - desc="Calculate a median file for each Nifti") + median_per_file = traits.Bool( + False, usedefault=True, + desc="Calculate a median file for each Nifti") + class CalculateMedianOutputSpec(TraitedSpec): median_files = OutputMultiPath(File(exists=True), - desc="One or more median images") + desc="One or more median images") + class CalculateMedian(BaseInterface): """ @@ -1503,6 +1509,7 @@ def __init__(self, **inputs): " please 
use nipype.algorithms.metrics.FuzzyOverlap"), DeprecationWarning) + class TSNR(confounds.TSNR): """ .. deprecated:: 0.12.1 diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index 2c994bf20d..ca85f5cbb0 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -18,7 +18,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, str, bytes, int from copy import deepcopy @@ -586,9 +587,9 @@ def _generate_design(self, infolist=None): sumscans = out.astype(int) + sum(nscans[0:i]) if out.size == 1: - outliers[0]+= [np.array(sumscans, dtype=int).tolist()] + outliers[0] += [np.array(sumscans, dtype=int).tolist()] else: - outliers[0]+= np.array(sumscans, dtype=int).tolist() + outliers[0] += np.array(sumscans, dtype=int).tolist() self._sessinfo = self._generate_standard_design(concatlist, functional_runs=functional_runs, diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index 0ab74b7404..11d402526f 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -587,7 +587,7 @@ class StimCorrInputSpec(BaseInterfaceInputSpec): intensity_values = InputMultiPath(File(exists=True), mandatory=True, desc=("Name of file containing intensity " - "values")) + "values")) spm_mat_file = File(exists=True, mandatory=True, desc="SPM mat file (use pre-estimate SPM.mat file)") diff --git a/nipype/algorithms/tests/test_compcor.py b/nipype/algorithms/tests/test_compcor.py index 9407e6ef0d..ab4e164004 100644 --- a/nipype/algorithms/tests/test_compcor.py +++ b/nipype/algorithms/tests/test_compcor.py @@ -23,8 +23,8 @@ def setup_class(self, tmpdir): # setup tmpdir.chdir() noise = np.fromfunction(self.fake_noise_fun, self.fake_data.shape) - self.realigned_file = utils.save_toy_nii(self.fake_data + noise, - self.filenames['functionalnii']) + self.realigned_file = utils.save_toy_nii( + self.fake_data + noise, self.filenames['functionalnii']) mask = np.ones(self.fake_data.shape[:3]) mask[0, 0, 0] = 0 mask[0, 0, 1] = 0 @@ -37,7 +37,6 @@ def setup_class(self, tmpdir): self.mask_files = [mask1, mask2] - def test_compcor(self): expected_components = [['-0.1989607212', '-0.5753813646'], ['0.5692369697', '0.5674945949'], @@ -48,14 +47,13 @@ def test_compcor(self): self.run_cc(CompCor(realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0), - expected_components) + expected_components) self.run_cc(ACompCor(realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, components_file='acc_components_file'), - expected_components, 'aCompCor') - + expected_components, 'aCompCor') def test_tcompcor(self): ccinterface = TCompCor(realigned_file=self.realigned_file, @@ -89,26 +87,31 @@ def test_compcor_no_regress_poly(self): def test_tcompcor_asymmetric_dim(self): asymmetric_shape = (2, 3, 4, 5) asymmetric_data = utils.save_toy_nii(np.zeros(asymmetric_shape), - 'asymmetric.nii') + 'asymmetric.nii') TCompCor(realigned_file=asymmetric_data).run() - assert nb.load('mask_000.nii.gz').get_data().shape == asymmetric_shape[:3] + assert nb.load( + 'mask_000.nii.gz').get_data().shape == asymmetric_shape[:3] def test_compcor_bad_input_shapes(self): - shape_less_than = (1, 2, 2, 5) # dim 0 is < dim 0 of self.mask_files (2) - shape_more_than = (3, 3, 3, 5) # dim 0 is > dim 0 of self.mask_files (2) + # dim 0 is < dim 0 of self.mask_files (2) + 
shape_less_than = (1, 2, 2, 5) + # dim 0 is > dim 0 of self.mask_files (2) + shape_more_than = (3, 3, 3, 5) for data_shape in (shape_less_than, shape_more_than): data_file = utils.save_toy_nii(np.zeros(data_shape), 'temp.nii') interface = CompCor(realigned_file=data_file, mask_files=self.mask_files[0]) - with pytest.raises(ValueError, message="Dimension mismatch"): interface.run() + with pytest.raises(ValueError, message="Dimension mismatch"): + interface.run() def test_tcompcor_bad_input_dim(self): bad_dims = (2, 2, 2) data_file = utils.save_toy_nii(np.zeros(bad_dims), 'temp.nii') interface = TCompCor(realigned_file=data_file) - with pytest.raises(ValueError, message='Not a 4D file'): interface.run() + with pytest.raises(ValueError, message='Not a 4D file'): + interface.run() def test_tcompcor_merge_intersect_masks(self): for method in ['union', 'intersect']: @@ -117,17 +120,17 @@ def test_tcompcor_merge_intersect_masks(self): merge_method=method).run() if method == 'union': assert np.array_equal(nb.load('mask_000.nii.gz').get_data(), - ([[[0,0],[0,0]],[[0,0],[1,0]]])) + ([[[0, 0], [0, 0]], [[0, 0], [1, 0]]])) if method == 'intersect': assert np.array_equal(nb.load('mask_000.nii.gz').get_data(), - ([[[0,0],[0,0]],[[0,1],[0,0]]])) + ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]])) def test_tcompcor_index_mask(self): TCompCor(realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=1).run() assert np.array_equal(nb.load('mask_000.nii.gz').get_data(), - ([[[0,0],[0,0]],[[0,1],[0,0]]])) + ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]])) def test_tcompcor_multi_mask_no_index(self): interface = TCompCor(realigned_file=self.realigned_file, @@ -135,7 +138,8 @@ def test_tcompcor_multi_mask_no_index(self): with pytest.raises(ValueError, message='more than one mask file'): interface.run() - def run_cc(self, ccinterface, expected_components, expected_header='CompCor'): + def run_cc(self, ccinterface, expected_components, + expected_header='CompCor'): # run ccresult = ccinterface.run() @@ -162,12 +166,11 @@ def run_cc(self, ccinterface, expected_components, expected_header='CompCor'): num_got_timepoints = len(components_data) assert num_got_timepoints == self.fake_data.shape[3] for index, timepoint in enumerate(components_data): - assert (len(timepoint) == ccinterface.inputs.num_components - or len(timepoint) == self.fake_data.shape[3]) + assert (len(timepoint) == ccinterface.inputs.num_components or + len(timepoint) == self.fake_data.shape[3]) assert timepoint[:2] == expected_components[index] return ccresult - @staticmethod def fake_noise_fun(i, j, l, m): return m*i + l - j diff --git a/nipype/algorithms/tests/test_confounds.py b/nipype/algorithms/tests/test_confounds.py index 8c2626457e..58940e4842 100644 --- a/nipype/algorithms/tests/test_confounds.py +++ b/nipype/algorithms/tests/test_confounds.py @@ -22,9 +22,10 @@ def test_fd(tmpdir): tempdir = tmpdir.strpath ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt')) - fdisplacement = FramewiseDisplacement(in_file=example_data('fsl_mcflirt_movpar.txt'), - out_file=tempdir + '/fd.txt', - parameter_source="FSL") + fdisplacement = FramewiseDisplacement( + in_file=example_data('fsl_mcflirt_movpar.txt'), + out_file=tempdir + '/fd.txt', + parameter_source="FSL") res = fdisplacement.run() with open(res.outputs.out_file) as all_lines: @@ -32,7 +33,8 @@ def test_fd(tmpdir): assert 'FramewiseDisplacement' in line break - assert np.allclose(ground_truth, np.loadtxt(res.outputs.out_file, skiprows=1), atol=.16) + assert np.allclose(ground_truth, 
np.loadtxt( + res.outputs.out_file, skiprows=1), atol=.16) assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2 @@ -40,14 +42,15 @@ def test_fd(tmpdir): def test_dvars(tmpdir): ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS')) dvars = ComputeDVARS(in_file=example_data('ds003_sub-01_mc.nii.gz'), - in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'), + in_mask=example_data( + 'ds003_sub-01_mc_brainmask.nii.gz'), save_all=True, intensity_normalization=0) tmpdir.chdir() res = dvars.run() dv1 = np.loadtxt(res.outputs.out_all, skiprows=1) - assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum()/ len(dv1)) < 0.05 + assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05 assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) < 0.05 @@ -73,4 +76,3 @@ def test_outliers(): in_data[0] += 10 assert is_outlier(in_data) == 1 - diff --git a/nipype/algorithms/tests/test_errormap.py b/nipype/algorithms/tests/test_errormap.py index 4b40d14907..cfd30b0b74 100644 --- a/nipype/algorithms/tests/test_errormap.py +++ b/nipype/algorithms/tests/test_errormap.py @@ -13,8 +13,10 @@ def test_errormap(tmpdir): # Single-Spectual # Make two fake 2*2*2 voxel volumes - volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # John von Neumann's birthday - volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) # Alan Turing's birthday + # John von Neumann's birthday + volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) + # Alan Turing's birthday + volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]]) img1 = nb.Nifti1Image(volume1, np.eye(4)) @@ -49,7 +51,8 @@ def test_errormap(tmpdir): assert result.outputs.distance == 1.0 # Multi-Spectual - volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) # Raymond Vahan Damadian's birthday + # Raymond Vahan Damadian's birthday + volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) msvolume1 = np.zeros(shape=(2, 2, 2, 2)) msvolume1[:, :, :, 0] = volume1 diff --git a/nipype/algorithms/tests/test_icc_anova.py b/nipype/algorithms/tests/test_icc_anova.py index 65b1e9c6ed..c3e2d8b310 100644 --- a/nipype/algorithms/tests/test_icc_anova.py +++ b/nipype/algorithms/tests/test_icc_anova.py @@ -5,8 +5,9 @@ def test_ICC_rep_anova(): - # see table 2 in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in - # Assessing Rater Reliability". Psychological Bulletin 86 (2): 420-428 + # see table 2 in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass + # Correlations: Uses in Assessing Rater Reliability". 
Psychological + # Bulletin 86 (2): 420-428 Y = np.array([[9, 2, 5, 8], [6, 1, 3, 2], [8, 4, 6, 8], diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index d5fbc56825..53eec75b69 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -32,7 +32,6 @@ def test_ident_distances(tmpdir): @pytest.mark.skipif(VTKInfo.no_tvtk(), reason="tvtk is not installed") def test_trans_distances(tmpdir): - tempdir = tmpdir.strpath from ...interfaces.vtkbase import tvtk in_surf = example_data('surf01.vtk') diff --git a/nipype/algorithms/tests/test_misc.py b/nipype/algorithms/tests/test_misc.py index d148ee8ca1..76c50aeaa8 100644 --- a/nipype/algorithms/tests/test_misc.py +++ b/nipype/algorithms/tests/test_misc.py @@ -35,11 +35,13 @@ def test_CreateNifti(create_analyze_pair_file_in_directory): assert os.path.exists(result.outputs.nifti_file) assert nb.load(result.outputs.nifti_file, mmap=NUMPY_MMAP) + def test_CalculateMedian(create_analyze_pair_file_in_directory): mean = misc.CalculateMedian() - with pytest.raises(TypeError): mean.run() + with pytest.raises(TypeError): + mean.run() mean.inputs.in_files = example_data('ds003_sub-01_mc.nii.gz') eg = mean.run() diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py index 3c9ec4096b..4a45e76c4d 100644 --- a/nipype/algorithms/tests/test_modelgen.py +++ b/nipype/algorithms/tests/test_modelgen.py @@ -24,7 +24,8 @@ def test_modelgen1(tmpdir): s = SpecifyModel() s.inputs.input_units = 'scans' set_output_units = lambda: setattr(s.inputs, 'output_units', 'scans') - with pytest.raises(TraitError): set_output_units() + with pytest.raises(TraitError): + set_output_units() s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 s.inputs.high_pass_filter_cutoff = 128. 
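
The one-line `with pytest.raises(...): stmt` form rewritten in the hunk above is what flake8 flags as E701 (multiple statements on one line); the cleanup expands each occurrence into block form. A minimal, self-contained sketch of the idiom, using a hypothetical `set_units` helper rather than nipype's actual SpecifyModel trait setter:

    import pytest


    def set_units(units):
        # hypothetical stand-in for the trait setter exercised in
        # test_modelgen1; it accepts only the two unit systems the tests use
        if units not in ('scans', 'secs'):
            raise ValueError('unsupported unit: %s' % units)


    def test_set_units_rejects_unknown():
        # before the cleanup (flagged as E701):
        #     with pytest.raises(ValueError): set_units('hours')
        # after the cleanup, the body moves to its own indented line:
        with pytest.raises(ValueError):
            set_units('hours')
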
@@ -153,4 +154,3 @@ def test_modelgen_sparse(tmpdir): assert len(res.outputs.session_info[0]['regress']) == 2 npt.assert_almost_equal(res.outputs.session_info[0]['regress'][0]['val'][0], 0.016675298129743384) npt.assert_almost_equal(res.outputs.session_info[1]['regress'][1]['val'][5], 0.007671459162258378) - diff --git a/nipype/algorithms/tests/test_moments.py b/nipype/algorithms/tests/test_moments.py index 17c8e922b2..c464e145d7 100644 --- a/nipype/algorithms/tests/test_moments.py +++ b/nipype/algorithms/tests/test_moments.py @@ -129,8 +129,6 @@ def test_skew(tmpdir): f.write(data) skewness = calc_moments(f.strpath, 3) assert np.allclose(skewness, np.array( - [-0.23418937314622, 0.2946365564954823, -0.05781002053540932, - -0.3512508282578762, - - 0.07035664150233077, - - 0.01935867699166935, - 0.00483863369427428, 0.21879460029850167])) + [-0.23418937314622, 0.2946365564954823, -0.05781002053540932, + -0.3512508282578762, -0.07035664150233077, -0.01935867699166935, + 0.00483863369427428, 0.21879460029850167])) diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py index 5d0fc5c47b..a65cc66770 100644 --- a/nipype/algorithms/tests/test_normalize_tpms.py +++ b/nipype/algorithms/tests/test_normalize_tpms.py @@ -51,4 +51,3 @@ def test_normalize_tpms(tmpdir): assert np.allclose(normdata, mapdata[i]) assert np.allclose(sumdata[sumdata > 0.0], 1.0) - diff --git a/nipype/algorithms/tests/test_overlap.py b/nipype/algorithms/tests/test_overlap.py index e0ec5bcfcb..6c2df79bdd 100644 --- a/nipype/algorithms/tests/test_overlap.py +++ b/nipype/algorithms/tests/test_overlap.py @@ -41,4 +41,3 @@ def check_close(val1, val2): check_close(res.outputs.jaccard, 0.99705) check_close(res.outputs.roi_voldiff, np.array([0.0063086, -0.0025506, 0.0])) - diff --git a/nipype/algorithms/tests/test_rapidart.py b/nipype/algorithms/tests/test_rapidart.py index 69e6334448..2a03232054 100644 --- a/nipype/algorithms/tests/test_rapidart.py +++ b/nipype/algorithms/tests/test_rapidart.py @@ -43,17 +43,20 @@ def test_ad_get_affine_matrix(): # test rotation params = np.array([0, 0, 0, np.pi / 2, np.pi / 2, np.pi / 2]) matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]).reshape((4, 4)) + out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_almost_equal(matrix, out) # test scaling params = np.array([0, 0, 0, 0, 0, 0, 1, 2, 3]) matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1]).reshape((4, 4)) + out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, + 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) # test shear params = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 3]) matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, 0, 0, 0, 0, 1]).reshape((4, 4)) + out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, + 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) @@ -62,7 +65,8 @@ def test_ad_get_norm(): np.pi / 4, 0, 0, 0, -np.pi / 4, -np.pi / 4, -np.pi / 4]).reshape((3, 6)) norm, _ = ra._calc_norm(params, False, 'SPM') - npt.assert_almost_equal(norm, np.array([18.86436316, 37.74610158, 31.29780829])) + npt.assert_almost_equal(norm, np.array( + [18.86436316, 37.74610158, 31.29780829])) norm, _ = ra._calc_norm(params, True, 'SPM') npt.assert_almost_equal(norm, np.array([0., 143.72192614, 173.92527131])) diff --git a/nipype/algorithms/tests/test_tsnr.py 
b/nipype/algorithms/tests/test_tsnr.py index f4bac9a17d..4c0f0c0b1a 100644 --- a/nipype/algorithms/tests/test_tsnr.py +++ b/nipype/algorithms/tests/test_tsnr.py @@ -20,9 +20,9 @@ class TestTSNR(): 'in_file': 'tsnrinfile.nii', } - out_filenames = {# default output file names + out_filenames = { # default output file names 'detrended_file': 'detrend.nii.gz', - 'mean_file': 'mean.nii.gz', + 'mean_file': 'mean.nii.gz', 'stddev_file': 'stdev.nii.gz', 'tsnr_file': 'tsnr.nii.gz' } @@ -86,12 +86,14 @@ def test_tsnr_withpoly3(self): @mock.patch('warnings.warn') def test_warning(self, mock_warn): - ''' test that usage of misc.TSNR trips a warning to use confounds.TSNR instead ''' + ''' test that usage of misc.TSNR trips a warning to use + confounds.TSNR instead ''' # run misc.TSNR(in_file=self.in_filenames['in_file']) # assert - assert True in [args[0].count('confounds') > 0 for _, args, _ in mock_warn.mock_calls] + assert True in [args[0].count( + 'confounds') > 0 for _, args, _ in mock_warn.mock_calls] def assert_expected_outputs_poly(self, tsnrresult, expected_ranges): assert os.path.basename(tsnrresult.outputs.detrended_file) == \ @@ -116,7 +118,6 @@ def assert_unchanged(self, expected_ranges): npt.assert_almost_equal(np.amin(data), min_, decimal=1) npt.assert_almost_equal(np.amax(data), max_, decimal=1) - fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index f3323180d9..94d0075adc 100644 --- a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -9,7 +9,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object, open import os @@ -23,7 +24,7 @@ from ..pipeline.engine import Node from ..pipeline.engine.utils import modify_paths -################################################################################ +############################################################################### # PipeFunc object: callable interface to nipype.interface objects @@ -85,8 +86,8 @@ def __call__(self, **kwargs): try: out = node.run() finally: - # node.run() changes to the node directory - if something goes wrong - # before it cds back you would end up in strange places + # node.run() changes to the node directory - if something goes + # wrong before it cds back you would end up in strange places os.chdir(cwd) if self.callback is not None: self.callback(dir_name, job_name) @@ -94,10 +95,10 @@ def __call__(self, **kwargs): def __repr__(self): return '{}({}.{}}, base_dir={})'.format( - self.__class__.__name__, self.interface.__module__, self.interface.__name__, - self.base_dir) + self.__class__.__name__, self.interface.__module__, + self.interface.__name__, self.base_dir) -################################################################################ +############################################################################### # Memory manager: provide some tracking about what is computed when, to # be able to flush the disk @@ -242,7 +243,8 @@ def _log_name(self, dir_name, job_name): except OSError: "Dir exists" - with open(os.path.join(month_dir, '%02i.log' % t.tm_mday), 'a') as rotatefile: + with open(os.path.join( + month_dir, '%02i.log' % t.tm_mday), 'a') as rotatefile: rotatefile.write('%s/%s\n' % (dir_name, job_name)) def clear_previous_runs(self, warn=True): diff --git 
a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index 50f56d4700..a33cec9491 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -40,4 +40,3 @@ def test_caching(tmpdir): assert results.outputs.output1 == [1, 1] finally: config.set('execution', 'stop_on_first_rerun', old_rerun) - diff --git a/nipype/conftest.py b/nipype/conftest.py index 27a3789ea4..ada8e08fb9 100644 --- a/nipype/conftest.py +++ b/nipype/conftest.py @@ -1,12 +1,13 @@ import pytest -import numpy, os +import numpy +import os + @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy doctest_namespace['os'] = os - filepath = os.path.dirname(os.path.realpath(__file__)) datadir = os.path.realpath(os.path.join(filepath, 'testing/data')) doctest_namespace["datadir"] = datadir diff --git a/nipype/external/fsl_imglob.py b/nipype/external/fsl_imglob.py index f77041c575..6fc07ade41 100755 --- a/nipype/external/fsl_imglob.py +++ b/nipype/external/fsl_imglob.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # imglob - expand list of image filenames -# Stephen Smith, Mark Jenkinson and Matthew Webster FMRIB Image Analysis Group +# Stephen Smith, Mark Jenkinson & Matthew Webster FMRIB Image Analysis Group # Copyright (C) 2009 University of Oxford # Part of FSL - FMRIB's Software Library # http://www.fmrib.ox.ac.uk/fsl @@ -68,6 +68,7 @@ import glob from builtins import range + def usage(): print("Usage: $0 [-extension/extensions] ") print(" -extension for one image with full extension") @@ -116,18 +117,21 @@ def main(): filelist = [] for arg in range(startingArg, len(sys.argv)): - # if isImage(sys.argv[arg],allExtensions)[0]: #These enable a "pedantic" style mode currently not used + # #These if enables a "pedantic" style mode currently not used + # if isImage(sys.argv[arg],allExtensions)[0]: # filelist.extend(glob.glob(sys.argv[arg])) # else: # for currentExtension in validExtensions: # filelist.extend(glob.glob(sys.argv[arg]+currentExtension)) for currentExtension in validExtensions: filelist.extend( - glob.glob(removeImageExtension(sys.argv[arg], allExtensions)+currentExtension)) + glob.glob(removeImageExtension( + sys.argv[arg], allExtensions)+currentExtension)) if deleteExtensions: for file in range(0, len(filelist)): - filelist[file] = removeImageExtension(filelist[file], allExtensions) + filelist[file] = removeImageExtension(filelist[file], + allExtensions) if setAvailable: filelist = list(set(filelist)) else: diff --git a/nipype/external/portalocker.py b/nipype/external/portalocker.py index 9cb186743c..bb6a3bef79 100644 --- a/nipype/external/portalocker.py +++ b/nipype/external/portalocker.py @@ -49,7 +49,8 @@ Version: $Id: portalocker.py 5474 2008-05-16 20:53:50Z lowell $ ''' -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open __all__ = [ @@ -91,11 +92,12 @@ def lock(file, flags): try: win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped) except pywintypes.error as exc_value: - # error: (33, 'LockFileEx', 'The process cannot access the file because another process has locked a portion of the file.') + # error: (33, 'LockFileEx', 'The process cannot access the file + # because another process has locked a portion of the file.') if exc_value[0] == 33: raise LockException(LockException.LOCK_FAILED, exc_value[2]) else: - # Q: Are there exceptions/codes we should be dealing with here? 
+ # Q: Are there exceptions/codes we should be dealing with here? raise def unlock(file): @@ -104,11 +106,12 @@ def unlock(file): win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped) except pywintypes.error as exc_value: if exc_value[0] == 158: - # error: (158, 'UnlockFileEx', 'The segment is already unlocked.') - # To match the 'posix' implementation, silently ignore this error + # error: (158, 'UnlockFileEx', 'The segment is already + # unlocked.') To match the 'posix' implementation, silently + # ignore this error pass else: - # Q: Are there exceptions/codes we should be dealing with here? + # Q: Are there exceptions/codes we should be dealing with here? raise elif os.name == 'posix': diff --git a/nipype/info.py b/nipype/info.py index 8714f99707..134c087bef 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -2,7 +2,8 @@ settings in setup.py, the nipy top-level docstring, and for building the docs. In setup.py in particular, we exec this file, so it cannot import nipy """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import sys @@ -25,8 +26,8 @@ def get_nipype_gitversion(): import subprocess try: import nipype - gitpath = os.path.realpath(os.path.join(os.path.dirname(nipype.__file__), - os.path.pardir)) + gitpath = os.path.realpath( + os.path.join(os.path.dirname(nipype.__file__), os.path.pardir)) except: gitpath = os.getcwd() gitpathgit = os.path.join(gitpath, '.git') @@ -81,9 +82,9 @@ def get_nipype_gitversion(): packages (e.g., AFNI, ANTS, BRAINS, BrainSuite, Camino, FreeSurfer, FSL, MNE, \ MRtrix, MNE, Nipy, Slicer, SPM), eases the design of workflows within and \ between packages, and reduces the learning curve necessary to use different \ -packages. Nipype is creating a collaborative platform for neuroimaging software \ -development in a high-level language and addressing limitations of existing \ -pipeline systems. +packages. Nipype is creating a collaborative platform for neuroimaging \ +software development in a high-level language and addressing limitations of \ +existing pipeline systems. 
*Nipype* allows you to: @@ -162,7 +163,7 @@ def get_nipype_gitversion(): 'profiler': ['psutil>=5.0'], 'duecredit': ['duecredit'], 'xvfbwrapper': ['xvfbwrapper'], - 'pybids' : ['pybids'] + 'pybids': ['pybids'] # 'mesh': ['mayavi'] # Enable when it works } diff --git a/nipype/interfaces/__init__.py b/nipype/interfaces/__init__.py index 8f8b5d25c2..afae87d483 100644 --- a/nipype/interfaces/__init__.py +++ b/nipype/interfaces/__init__.py @@ -6,7 +6,8 @@ Requires Packages to be installed """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) __docformat__ = 'restructuredtext' from .io import DataGrabber, DataSink, SelectFiles diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index 1097a28d46..e16a5fda57 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -2,7 +2,8 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provide interface to AFNI commands.""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object, str from future.utils import raise_from @@ -104,6 +105,7 @@ class AFNICommandBase(CommandLine): A base class to fix a linking problem in OSX and afni. See http://afni.nimh.nih.gov/afni/community/board/read.php?1,145346,145347#msg-145347 """ + def _run_interface(self, runtime): if platform == 'darwin': runtime.environ['DYLD_FALLBACK_LIBRARY_PATH'] = '/usr/local/afni/' @@ -130,31 +132,32 @@ class AFNICommand(AFNICommandBase): input_spec = AFNICommandInputSpec _outputtype = None - references_ = [{'entry': BibTeX('@article{Cox1996,' - 'author={R.W. Cox},' - 'title={AFNI: software for analysis and ' - 'visualization of functional magnetic ' - 'resonance neuroimages},' - 'journal={Computers and Biomedical research},' - 'volume={29},' - 'number={3},' - 'pages={162-173},' - 'year={1996},' - '}'), - 'tags': ['implementation'], - }, - {'entry': BibTeX('@article{CoxHyde1997,' - 'author={R.W. Cox and J.S. Hyde},' - 'title={Software tools for analysis and ' - 'visualization of fMRI data},' - 'journal={NMR in Biomedicine},' - 'volume={10},' - 'number={45},' - 'pages={171-178},' - 'year={1997},' - '}'), - 'tags': ['implementation'], - }] + references_ = [ + {'entry': BibTeX('@article{Cox1996,' + 'author={R.W. Cox},' + 'title={AFNI: software for analysis and ' + 'visualization of functional magnetic ' + 'resonance neuroimages},' + 'journal={Computers and Biomedical research},' + 'volume={29},' + 'number={3},' + 'pages={162-173},' + 'year={1996},' + '}'), + 'tags': ['implementation'], + }, + {'entry': BibTeX('@article{CoxHyde1997,' + 'author={R.W. Cox and J.S. 
Hyde},' + 'title={Software tools for analysis and ' + 'visualization of fMRI data},' + 'journal={NMR in Biomedicine},' + 'volume={10},' + 'number={45},' + 'pages={171-178},' + 'year={1997},' + '}'), + 'tags': ['implementation'], + }] @property def num_threads(self): @@ -207,7 +210,8 @@ def _output_update(self): def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) - return os.path.join(path, base + Info.output_type_to_ext(self.inputs.outputtype)) + return os.path.join(path, base + Info.output_type_to_ext( + self.inputs.outputtype)) def _list_outputs(self): outputs = super(AFNICommand, self)._list_outputs() @@ -286,11 +290,13 @@ class AFNIPythonCommandInputSpec(CommandLineInputSpec): class AFNIPythonCommand(AFNICommand): @property def cmd(self): - if spawn.find_executable(super(AFNIPythonCommand, self).cmd) is not None: + if spawn.find_executable( + super(AFNIPythonCommand, self).cmd) is not None: return spawn.find_executable(super(AFNIPythonCommand, self).cmd) else: return super(AFNIPythonCommand, self).cmd @property def cmdline(self): - return "{} {}".format(self.inputs.py27_path, super(AFNIPythonCommand, self).cmdline) + return "{} {}".format( + self.inputs.py27_path, super(AFNIPythonCommand, self).cmdline) diff --git a/nipype/interfaces/afni/model.py b/nipype/interfaces/afni/model.py index d5730d15f9..3ac47d99d6 100644 --- a/nipype/interfaces/afni/model.py +++ b/nipype/interfaces/afni/model.py @@ -12,7 +12,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os @@ -24,6 +25,7 @@ from .base import ( AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec) + class DeconvolveInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File( @@ -192,7 +194,7 @@ class DeconvolveInputSpec(AFNICommandInputSpec): position=-6) stim_times = traits.List( traits.Tuple(traits.Int(desc='k-th response model'), - File(desc='stimulus timing file',exists=True), + File(desc='stimulus timing file', exists=True), Str(desc='model')), desc='generate a response model from a set of stimulus times' ' given in file.', @@ -275,7 +277,7 @@ def _format_arg(self, name, trait_spec, value): if val.startswith('SYM: '): value[n] = val.lstrip('SYM: ') - return super(Deconvolve,self)._format_arg(name, trait_spec, value) + return super(Deconvolve, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: @@ -413,7 +415,7 @@ class RemlfitInputSpec(AFNICommandInputSpec): 'mean removed (as is done in Deconvolve); this option turns this ' 'centering off', argstr='-nodmbase', - requires=['addbase','dsort']) + requires=['addbase', 'dsort']) dsort = File( desc='4D dataset to be used as voxelwise baseline regressor', exists=True, @@ -637,7 +639,7 @@ class SynthesizeInputSpec(AFNICommandInputSpec): 'TR is read from the header of the matrix file.', argstr='-TR %f') cenfill = traits.Enum( - 'zero','nbhr','none', + 'zero', 'nbhr', 'none', argstr='-cenfill %s', desc='Determines how censored time points from the ' '3dDeconvolve run will be filled. 
Valid types ' diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 5d90591953..70b7e12acc 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -9,7 +9,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import os @@ -47,6 +48,7 @@ class CentralityInputSpec(AFNICommandInputSpec): desc='Mask the dataset to target brain-only voxels', argstr='-automask') + class AlignEpiAnatPyInputSpec(AFNIPythonCommandInputSpec): in_file = File( desc='EPI dataset to align', @@ -126,6 +128,7 @@ class AlignEpiAnatPyOutputSpec(TraitedSpec): skullstrip = File( desc="skull-stripped (not aligned) volume") + class AlignEpiAnatPy(AFNIPythonCommand): """Align EPI to anatomical datasets or vice versa This Python script computes the alignment between two datasets, typically @@ -206,6 +209,7 @@ def _list_outputs(self): outputs.skullstrip = self._gen_fname(anat_prefix, suffix='_ns'+'+orig', ext=ext) return outputs + class AllineateInputSpec(AFNICommandInputSpec): in_file = File( desc='input file to 3dAllineate', @@ -226,7 +230,7 @@ class AllineateInputSpec(AFNICommandInputSpec): out_param_file = File( argstr='-1Dparam_save %s', desc='Save the warp parameters in ASCII (.1D) format.', - xor=['in_param_file','allcostx']) + xor=['in_param_file', 'allcostx']) in_param_file = File( exists=True, argstr='-1Dparam_apply %s', @@ -236,7 +240,7 @@ class AllineateInputSpec(AFNICommandInputSpec): out_matrix = File( argstr='-1Dmatrix_save %s', desc='Save the transformation matrix for each volume.', - xor=['in_matrix','allcostx']) + xor=['in_matrix', 'allcostx']) in_matrix = File( desc='matrix to align input file', argstr='-1Dmatrix_apply %s', @@ -246,7 +250,7 @@ class AllineateInputSpec(AFNICommandInputSpec): desc='overwrite output file if it already exists', argstr='-overwrite') - allcostx= File( + allcostx = File( desc='Compute and print ALL available cost functionals for the un-warped inputs' 'AND THEN QUIT. If you use this option none of the other expected outputs will be produced', argstr='-allcostx |& tee %s', @@ -518,8 +522,8 @@ def _list_outputs(self): outputs['out_param_file'] = op.abspath(self.inputs.out_param_file) if isdefined(self.inputs.allcostx): - outputs['allcostX'] = os.path.abspath(os.path.join(os.getcwd(), - self.inputs.allcostx)) + outputs['allcostX'] = os.path.abspath( + os.path.join(os.getcwd(), self.inputs.allcostx)) return outputs def _gen_filename(self, name): @@ -659,6 +663,7 @@ class Automask(AFNICommand): input_spec = AutomaskInputSpec output_spec = AutomaskOutputSpec + class AutoTLRCInputSpec(CommandLineInputSpec): outputtype = traits.Enum('AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype') @@ -671,65 +676,66 @@ class AutoTLRCInputSpec(CommandLineInputSpec): exists=True, copyfile=False) base = traits.Str( - desc = ' Reference anatomical volume' - ' Usually this volume is in some standard space like' - ' TLRC or MNI space and with afni dataset view of' - ' (+tlrc).' - ' Preferably, this reference volume should have had' - ' the skull removed but that is not mandatory.' - ' AFNI\'s distribution contains several templates.' - ' For a longer list, use "whereami -show_templates"' - 'TT_N27+tlrc --> Single subject, skull stripped volume.' 
- ' This volume is also known as ' - ' N27_SurfVol_NoSkull+tlrc elsewhere in ' - ' AFNI and SUMA land.' - ' (www.loni.ucla.edu, www.bic.mni.mcgill.ca)' - ' This template has a full set of FreeSurfer' - ' (surfer.nmr.mgh.harvard.edu)' - ' surface models that can be used in SUMA. ' - ' For details, see Talairach-related link:' - ' https://afni.nimh.nih.gov/afni/suma' - 'TT_icbm452+tlrc --> Average volume of 452 normal brains.' - ' Skull Stripped. (www.loni.ucla.edu)' - 'TT_avg152T1+tlrc --> Average volume of 152 normal brains.' - ' Skull Stripped.(www.bic.mni.mcgill.ca)' - 'TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1' - ' TT_avg152 and TT_EPI volume sources are from' - ' SPM\'s distribution. (www.fil.ion.ucl.ac.uk/spm/)' - 'If you do not specify a path for the template, the script' - 'will attempt to locate the template AFNI\'s binaries directory.' - 'NOTE: These datasets have been slightly modified from' - ' their original size to match the standard TLRC' - ' dimensions (Jean Talairach and Pierre Tournoux' - ' Co-Planar Stereotaxic Atlas of the Human Brain' - ' Thieme Medical Publishers, New York, 1988). ' - ' That was done for internal consistency in AFNI.' - ' You may use the original form of these' - ' volumes if you choose but your TLRC coordinates' - ' will not be consistent with AFNI\'s TLRC database' - ' (San Antonio Talairach Daemon database), for example.', - mandatory = True, + desc=' Reference anatomical volume' + ' Usually this volume is in some standard space like' + ' TLRC or MNI space and with afni dataset view of' + ' (+tlrc).' + ' Preferably, this reference volume should have had' + ' the skull removed but that is not mandatory.' + ' AFNI\'s distribution contains several templates.' + ' For a longer list, use "whereami -show_templates"' + 'TT_N27+tlrc --> Single subject, skull stripped volume.' + ' This volume is also known as ' + ' N27_SurfVol_NoSkull+tlrc elsewhere in ' + ' AFNI and SUMA land.' + ' (www.loni.ucla.edu, www.bic.mni.mcgill.ca)' + ' This template has a full set of FreeSurfer' + ' (surfer.nmr.mgh.harvard.edu)' + ' surface models that can be used in SUMA. ' + ' For details, see Talairach-related link:' + ' https://afni.nimh.nih.gov/afni/suma' + 'TT_icbm452+tlrc --> Average volume of 452 normal brains.' + ' Skull Stripped. (www.loni.ucla.edu)' + 'TT_avg152T1+tlrc --> Average volume of 152 normal brains.' + ' Skull Stripped.(www.bic.mni.mcgill.ca)' + 'TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1' + ' TT_avg152 and TT_EPI volume sources are from' + ' SPM\'s distribution. (www.fil.ion.ucl.ac.uk/spm/)' + 'If you do not specify a path for the template, the script' + 'will attempt to locate the template AFNI\'s binaries directory.' + 'NOTE: These datasets have been slightly modified from' + ' their original size to match the standard TLRC' + ' dimensions (Jean Talairach and Pierre Tournoux' + ' Co-Planar Stereotaxic Atlas of the Human Brain' + ' Thieme Medical Publishers, New York, 1988). ' + ' That was done for internal consistency in AFNI.' + ' You may use the original form of these' + ' volumes if you choose but your TLRC coordinates' + ' will not be consistent with AFNI\'s TLRC database' + ' (San Antonio Talairach Daemon database), for example.', + mandatory=True, argstr='-base %s') no_ss = traits.Bool( desc='Do not strip skull of input data set' - '(because skull has already been removed' - 'or because template still has the skull)' - 'NOTE: The -no_ss option is not all that optional.' 
- ' Here is a table of when you should and should not use -no_ss' - ' Template Template' - ' WITH skull WITHOUT skull' - ' Dset.' - ' WITH skull -no_ss xxx ' - ' ' - ' WITHOUT skull No Cigar -no_ss' - ' ' - ' Template means: Your template of choice' - ' Dset. means: Your anatomical dataset' - ' -no_ss means: Skull stripping should not be attempted on Dset' - ' xxx means: Don\'t put anything, the script will strip Dset' - ' No Cigar means: Don\'t try that combination, it makes no sense.', + '(because skull has already been removed' + 'or because template still has the skull)' + 'NOTE: The -no_ss option is not all that optional.' + ' Here is a table of when you should and should not use -no_ss' + ' Template Template' + ' WITH skull WITHOUT skull' + ' Dset.' + ' WITH skull -no_ss xxx ' + ' ' + ' WITHOUT skull No Cigar -no_ss' + ' ' + ' Template means: Your template of choice' + ' Dset. means: Your anatomical dataset' + ' -no_ss means: Skull stripping should not be attempted on Dset' + ' xxx means: Don\'t put anything, the script will strip Dset' + ' No Cigar means: Don\'t try that combination, it makes no sense.', argstr='-no_ss') + class AutoTLRC(AFNICommand): """A minmal wrapper for the AutoTLRC script The only option currently supported is no_ss. @@ -751,12 +757,14 @@ class AutoTLRC(AFNICommand): _cmd = '@auto_tlrc' input_spec = AutoTLRCInputSpec output_spec = AFNICommandOutputSpec + def _list_outputs(self): outputs = self.output_spec().get() ext = '.HEAD' outputs['out_file'] = os.path.abspath(self._gen_fname(self.inputs.in_file, suffix='+tlrc')+ext) return outputs + class BandpassInputSpec(AFNICommandInputSpec): in_file = File( desc='input file to 3dBandpass', @@ -1482,7 +1490,6 @@ def _parse_inputs(self, skip=None): skip += ['out_show'] return super(Hist, self)._parse_inputs(skip=skip) - def _list_outputs(self): outputs = super(Hist, self)._list_outputs() outputs['out_file'] += '.niml.hist' @@ -1892,7 +1899,8 @@ class ROIStatsInputSpec(CommandLineInputSpec): desc='execute quietly', argstr='-quiet', position=1) - terminal_output = traits.Enum('allatonce', deprecated='1.0.0', + terminal_output = traits.Enum( + 'allatonce', deprecated='1.0.0', desc='Control terminal output:`allatonce` - waits till command is ' 'finished to display output', nohash=True) @@ -1932,7 +1940,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() output_filename = 'roi_stats.csv' with open(output_filename, 'w') as f: - f.write(runtime.stdout) + f.write(runtime.stdout) outputs.stats = os.path.abspath(output_filename) return outputs @@ -2634,7 +2642,7 @@ class VolregInputSpec(AFNICommandInputSpec): keep_extension=True, name_source='in_file') interp = traits.Enum( - ('Fourier', 'cubic', 'heptic', 'quintic','linear'), + ('Fourier', 'cubic', 'heptic', 'quintic', 'linear'), desc='spatial interpolation methods [default = heptic]', argstr='-%s') @@ -2803,15 +2811,15 @@ class QwarpPlusMinusInputSpec(CommandLineInputSpec): blur = traits.List( traits.Float(), desc="Gaussian blur the input images by (FWHM) voxels " - "before doing the alignment (the output dataset " - "will not be blurred). The default is 2.345 (for " - "no good reason). Optionally, you can provide 2 " - "values, and then the first one is applied to the " - "base volume, the second to the source volume. A " - "negative blur radius means to use 3D median " - "filtering, rather than Gaussian blurring. 
This " - "type of filtering will better preserve edges, " - "which can be important in alignment.", + "before doing the alignment (the output dataset " + "will not be blurred). The default is 2.345 (for " + "no good reason). Optionally, you can provide 2 " + "values, and then the first one is applied to the " + "base volume, the second to the source volume. A " + "negative blur radius means to use 3D median " + "filtering, rather than Gaussian blurring. This " + "type of filtering will better preserve edges, " + "which can be important in alignment.", argstr='-blur %s', minlen=1, maxlen=2) @@ -3397,7 +3405,6 @@ class QwarpInputSpec(AFNICommandInputSpec): xor=['nmi', 'hel', 'lpc', 'lpa', 'pear']) - class QwarpOutputSpec(TraitedSpec): warped_source = File( desc='Warped source file. If plusminus is used, this is the undistorted' @@ -3514,7 +3521,7 @@ def _list_outputs(self): if not isdefined(self.inputs.out_file): prefix = self._gen_fname(self.inputs.in_file, suffix='_QW') ext = '.HEAD' - suffix ='+tlrc' + suffix = '+tlrc' else: prefix = self.inputs.out_file ext_ind = max([prefix.lower().rfind('.nii.gz'), @@ -3528,23 +3535,23 @@ def _list_outputs(self): outputs['warped_source'] = fname_presuffix(prefix, suffix=suffix, use_ext=False) + ext if not self.inputs.nowarp: - outputs['source_warp'] = fname_presuffix(prefix, - suffix='_WARP' + suffix, use_ext=False) + ext + outputs['source_warp'] = fname_presuffix( + prefix, suffix='_WARP' + suffix, use_ext=False) + ext if self.inputs.iwarp: - outputs['base_warp'] = fname_presuffix(prefix, - suffix='_WARPINV' + suffix, use_ext=False) + ext + outputs['base_warp'] = fname_presuffix( + prefix, suffix='_WARPINV' + suffix, use_ext=False) + ext if isdefined(self.inputs.out_weight_file): outputs['weights'] = os.path.abspath(self.inputs.out_weight_file) if self.inputs.plusminus: - outputs['warped_source'] = fname_presuffix(prefix, - suffix='_PLUS' + suffix, use_ext=False) + ext - outputs['warped_base'] = fname_presuffix(prefix, - suffix='_MINUS' + suffix, use_ext=False) + ext - outputs['source_warp'] = fname_presuffix(prefix, - suffix='_PLUS_WARP' + suffix, use_ext=False) + ext - outputs['base_warp'] = fname_presuffix(prefix, - suffix='_MINUS_WARP' + suffix, use_ext=False) + ext + outputs['warped_source'] = fname_presuffix( + prefix, suffix='_PLUS' + suffix, use_ext=False) + ext + outputs['warped_base'] = fname_presuffix( + prefix, suffix='_MINUS' + suffix, use_ext=False) + ext + outputs['source_warp'] = fname_presuffix( + prefix, suffix='_PLUS_WARP' + suffix, use_ext=False) + ext + outputs['base_warp'] = fname_presuffix( + prefix, suffix='_MINUS_WARP' + suffix, use_ext=False) + ext return outputs def _gen_filename(self, name): diff --git a/nipype/interfaces/afni/svm.py b/nipype/interfaces/afni/svm.py index e9ea02a57b..b30b1fb953 100644 --- a/nipype/interfaces/afni/svm.py +++ b/nipype/interfaces/afni/svm.py @@ -9,7 +9,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ..base import TraitedSpec, traits, File from .base import AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py index 8ddc340858..6be326fe96 100644 --- a/nipype/interfaces/afni/utils.py +++ b/nipype/interfaces/afni/utils.py @@ -12,7 +12,8 @@ >>> datadir = 
os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, bytes import os @@ -29,6 +30,7 @@ AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec, AFNIPythonCommandInputSpec, AFNIPythonCommand) + class ABoverlapInputSpec(AFNICommandInputSpec): in_file_a = File( desc='input file A', @@ -261,7 +263,8 @@ class BrickStatInputSpec(CommandLineInputSpec): var = traits.Bool( desc='print the variance in the dataset', argstr='-var') - percentile = traits.Tuple(traits.Float, traits.Float, traits.Float, + percentile = traits.Tuple( + traits.Float, traits.Float, traits.Float, desc='p0 ps p1 write the percentile values starting ' 'at p0% and ending at p1% at a step of ps%. ' 'only one sub-brick is accepted.', @@ -325,57 +328,58 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): return outputs + class BucketInputSpec(AFNICommandInputSpec): in_file = traits.List( traits.Tuple( (File( exists=True, copyfile=False), - traits.Str(argstr="'%s'")), + traits.Str(argstr="'%s'")), artstr="%s%s"), position=-1, mandatory=True, argstr="%s", desc='List of tuples of input datasets and subbrick selection strings' - 'as described in more detail in the following afni help string' - 'Input dataset specified using one of these forms:' - ' \'prefix+view\', \'prefix+view.HEAD\', or \'prefix+view.BRIK\'.' - 'You can also add a sub-brick selection list after the end of the' - 'dataset name. This allows only a subset of the sub-bricks to be' - 'included into the output (by default, all of the input dataset' - 'is copied into the output). A sub-brick selection list looks like' - 'one of the following forms:' - ' fred+orig[5] ==> use only sub-brick #5' - ' fred+orig[5,9,17] ==> use #5, #9, and #17' - ' fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8' - ' fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13' - 'Sub-brick indexes start at 0. You can use the character \'$\'' - 'to indicate the last sub-brick in a dataset; for example, you' - 'can select every third sub-brick by using the selection list' - ' fred+orig[0..$(3)]' - 'N.B.: The sub-bricks are output in the order specified, which may' - ' not be the order in the original datasets. For example, using' - ' fred+orig[0..$(2),1..$(2)]' - ' will cause the sub-bricks in fred+orig to be output into the' - ' new dataset in an interleaved fashion. Using' - ' fred+orig[$..0]' - ' will reverse the order of the sub-bricks in the output.' - 'N.B.: Bucket datasets have multiple sub-bricks, but do NOT have' - ' a time dimension. You can input sub-bricks from a 3D+time dataset' - ' into a bucket dataset. You can use the \'3dinfo\' program to see' - ' how many sub-bricks a 3D+time or a bucket dataset contains.' - 'N.B.: In non-bucket functional datasets (like the \'fico\' datasets' - ' output by FIM, or the \'fitt\' datasets output by 3dttest), sub-brick' - ' [0] is the \'intensity\' and sub-brick [1] is the statistical parameter' - ' used as a threshold. 
Thus, to create a bucket dataset using the' - ' intensity from dataset A and the threshold from dataset B, and' - ' calling the output dataset C, you would type' - ' 3dbucket -prefix C -fbuc \'A+orig[0]\' -fbuc \'B+orig[1]\'' - 'WARNING: using this program, it is possible to create a dataset that' - ' has different basic datum types for different sub-bricks' - ' (e.g., shorts for brick 0, floats for brick 1).' - ' Do NOT do this! Very few AFNI programs will work correctly' - ' with such datasets!') + 'as described in more detail in the following afni help string' + 'Input dataset specified using one of these forms:' + ' \'prefix+view\', \'prefix+view.HEAD\', or \'prefix+view.BRIK\'.' + 'You can also add a sub-brick selection list after the end of the' + 'dataset name. This allows only a subset of the sub-bricks to be' + 'included into the output (by default, all of the input dataset' + 'is copied into the output). A sub-brick selection list looks like' + 'one of the following forms:' + ' fred+orig[5] ==> use only sub-brick #5' + ' fred+orig[5,9,17] ==> use #5, #9, and #17' + ' fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8' + ' fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13' + 'Sub-brick indexes start at 0. You can use the character \'$\'' + 'to indicate the last sub-brick in a dataset; for example, you' + 'can select every third sub-brick by using the selection list' + ' fred+orig[0..$(3)]' + 'N.B.: The sub-bricks are output in the order specified, which may' + ' not be the order in the original datasets. For example, using' + ' fred+orig[0..$(2),1..$(2)]' + ' will cause the sub-bricks in fred+orig to be output into the' + ' new dataset in an interleaved fashion. Using' + ' fred+orig[$..0]' + ' will reverse the order of the sub-bricks in the output.' + 'N.B.: Bucket datasets have multiple sub-bricks, but do NOT have' + ' a time dimension. You can input sub-bricks from a 3D+time dataset' + ' into a bucket dataset. You can use the \'3dinfo\' program to see' + ' how many sub-bricks a 3D+time or a bucket dataset contains.' + 'N.B.: In non-bucket functional datasets (like the \'fico\' datasets' + ' output by FIM, or the \'fitt\' datasets output by 3dttest), sub-brick' + ' [0] is the \'intensity\' and sub-brick [1] is the statistical parameter' + ' used as a threshold. Thus, to create a bucket dataset using the' + ' intensity from dataset A and the threshold from dataset B, and' + ' calling the output dataset C, you would type' + ' 3dbucket -prefix C -fbuc \'A+orig[0]\' -fbuc \'B+orig[1]\'' + 'WARNING: using this program, it is possible to create a dataset that' + ' has different basic datum types for different sub-bricks' + ' (e.g., shorts for brick 0, floats for brick 1).' + ' Do NOT do this! 
Very few AFNI programs will work correctly' + ' with such datasets!') out_file = File( argstr='-prefix %s', name_template='buck') @@ -407,9 +411,10 @@ class Bucket(AFNICommand): def _format_arg(self, name, spec, value): if name == 'in_file': - return spec.argstr%(' '.join([i[0]+"'"+i[1]+"'" for i in value])) + return spec.argstr % (' '.join([i[0] + "'" + i[1] + "'" for i in value])) return super(Bucket, self)._format_arg(name, spec, value) + class CalcInputSpec(AFNICommandInputSpec): in_file_a = File( desc='input file to 3dcalc', @@ -525,37 +530,37 @@ class CatInputSpec(AFNICommandInputSpec): 'If there is no such header, all columns are kept.', argstr='-nonfixed') out_format = traits.Enum( - 'int','nice','double','fint','cint', + 'int', 'nice', 'double', 'fint', 'cint', argstr='-form %s', desc='specify data type for output. Valid types are \'int\', ' '\'nice\', \'double\', \'fint\', and \'cint\'.', - xor=['out_int','out_nice','out_double','out_fint','out_cint']) + xor=['out_int', 'out_nice', 'out_double', 'out_fint', 'out_cint']) stack = traits.Bool( desc='Stack the columns of the resultant matrix in the output.', argstr='-stack') sel = traits.Str( desc='Apply the same column/row selection string to all filenames ' - 'on the command line.', + 'on the command line.', argstr='-sel %s') out_int = traits.Bool( desc='specifiy int data type for output', argstr='-i', - xor=['out_format','out_nice','out_double','out_fint','out_cint']) + xor=['out_format', 'out_nice', 'out_double', 'out_fint', 'out_cint']) out_nice = traits.Bool( desc='specifiy nice data type for output', argstr='-n', - xor=['out_format','out_int','out_double','out_fint','out_cint']) + xor=['out_format', 'out_int', 'out_double', 'out_fint', 'out_cint']) out_double = traits.Bool( desc='specifiy double data type for output', argstr='-d', - xor=['out_format','out_nice','out_int','out_fint','out_cint']) + xor=['out_format', 'out_nice', 'out_int', 'out_fint', 'out_cint']) out_fint = traits.Bool( desc='specifiy int, rounded down, data type for output', argstr='-f', - xor=['out_format','out_nice','out_double','out_int','out_cint']) + xor=['out_format', 'out_nice', 'out_double', 'out_int', 'out_cint']) out_cint = traits.Bool( desc='specifiy int, rounded up, data type for output', - xor=['out_format','out_nice','out_double','out_fint','out_int']) + xor=['out_format', 'out_nice', 'out_double', 'out_fint', 'out_int']) class Cat(AFNICommand): @@ -584,6 +589,7 @@ class Cat(AFNICommand): input_spec = CatInputSpec output_spec = AFNICommandOutputSpec + class CatMatvecInputSpec(AFNICommandInputSpec): in_file = traits.List( traits.Tuple(traits.Str(), traits.Str()), @@ -598,9 +604,9 @@ class CatMatvecInputSpec(AFNICommandInputSpec): mandatory=True) matrix = traits.Bool( descr="indicates that the resulting matrix will" - "be written to outfile in the 'MATRIX(...)' format (FORM 3)." - "This feature could be used, with clever scripting, to input" - "a matrix directly on the command line to program 3dWarp.", + "be written to outfile in the 'MATRIX(...)' format (FORM 3)." 
+ "This feature could be used, with clever scripting, to input" + "a matrix directly on the command line to program 3dWarp.", argstr="-MATRIX", xor=['oneline', 'fourxfour']) oneline = traits.Bool( @@ -612,7 +618,8 @@ class CatMatvecInputSpec(AFNICommandInputSpec): descr="Output matrix in augmented form (last row is 0 0 0 1)" "This option does not work with -MATRIX or -ONELINE", argstr="-4x4", - xor=['matrix','oneline']) + xor=['matrix', 'oneline']) + class CatMatvec(AFNICommand): """Catenates 3D rotation+shift matrix+vector transformations. @@ -639,7 +646,7 @@ class CatMatvec(AFNICommand): def _format_arg(self, name, spec, value): if name == 'in_file': - return spec.argstr%(' '.join([i[0]+' -'+i[1] for i in value])) + return spec.argstr % (' '.join([i[0] + ' -' + i[1] for i in value])) return super(CatMatvec, self)._format_arg(name, spec, value) @@ -732,7 +739,7 @@ def _list_outputs(self): outputs = super(CenterMass, self)._list_outputs() outputs['out_file'] = os.path.abspath(self.inputs.in_file) outputs['cm_file'] = os.path.abspath(self.inputs.cm_file) - sout = np.loadtxt(outputs['cm_file'], ndmin=2) # pylint: disable=E1101 + sout = np.loadtxt(outputs['cm_file'], ndmin=2) outputs['cm'] = [tuple(s) for s in sout] return outputs @@ -798,6 +805,7 @@ class Copy(AFNICommand): input_spec = CopyInputSpec output_spec = AFNICommandOutputSpec + class DotInputSpec(AFNICommandInputSpec): in_files = traits.List( (File()), @@ -811,7 +819,8 @@ class DotInputSpec(AFNICommandInputSpec): mask = File( desc='Use this dataset as a mask', argstr='-mask %s') - mrange = traits.Tuple((traits.Float(),traits.Float()), + mrange = traits.Tuple( + (traits.Float(), traits.Float()), desc='Means to further restrict the voxels from \'mset\' so that' 'only those mask values within this range (inclusive) willbe used.', argstr='-mrange %s %s') @@ -841,12 +850,13 @@ class DotInputSpec(AFNICommandInputSpec): argstr='-full') show_labels = traits.Bool( desc='Print sub-brick labels to help identify what is being correlated. This option is useful when' - 'you have more than 2 sub-bricks at input.', + 'you have more than 2 sub-bricks at input.', argstr='-show_labels') upper = traits.Bool( desc='Compute upper triangular matrix', argstr='-upper') + class Dot(AFNICommand): """Correlation coefficient between sub-brick pairs. All datasets in in_files list will be concatenated. @@ -869,6 +879,7 @@ class Dot(AFNICommand): input_spec = DotInputSpec output_spec = AFNICommandOutputSpec + class Edge3InputSpec(AFNICommandInputSpec): in_file = File( desc='input file to 3dedge3', @@ -1314,7 +1325,7 @@ def _list_outputs(self): else: outputs['out_detrend'] = Undefined - sout = np.loadtxt(outputs['out_file']) #pylint: disable=E1101 + sout = np.loadtxt(outputs['out_file']) if self._acf: outputs['acf_param'] = tuple(sout[1]) sout = tuple(sout[0]) @@ -1347,7 +1358,7 @@ class MaskToolInputSpec(AFNICommandInputSpec): argstr='-count', position=2) datum = traits.Enum( - 'byte','short','float', + 'byte', 'short', 'float', argstr='-datum %s', desc='specify data type for output. 
Valid types are \'byte\', ' '\'short\' and \'float\'.') @@ -1523,11 +1534,12 @@ def _list_outputs(self): class NwarpApplyInputSpec(CommandLineInputSpec): - in_file = traits.Either(File(exists=True), traits.List(File(exists=True)), + in_file = traits.Either( + File(exists=True), traits.List(File(exists=True)), mandatory=True, argstr='-source %s', desc='the name of the dataset to be warped ' - 'can be multiple datasets') + 'can be multiple datasets') warp = traits.String( desc='the name of the warp dataset. ' 'multiple warps can be concatenated (make sure they exist)', @@ -1536,18 +1548,21 @@ class NwarpApplyInputSpec(CommandLineInputSpec): inv_warp = traits.Bool( desc='After the warp specified in \'-nwarp\' is computed, invert it', argstr='-iwarp') - master = traits.File(exists=True, + master = traits.File( + exists=True, desc='the name of the master dataset, which defines the output grid', argstr='-master %s') - interp = traits.Enum('NN','nearestneighbour','nearestneighbor','linear', - 'trilinear','cubic','tricubic','quintic','triquintic','wsinc5', + interp = traits.Enum( + 'NN', 'nearestneighbour', 'nearestneighbor', 'linear', + 'trilinear', 'cubic', 'tricubic', 'quintic', 'triquintic', 'wsinc5', desc='defines interpolation method to use during warp', argstr='-interp %s', default='wsinc5') - ainterp = traits.Enum('NN','nearestneighbour','nearestneighbor','linear', - 'trilinear','cubic','tricubic','quintic','triquintic','wsinc5', + ainterp = traits.Enum( + 'NN', 'nearestneighbour', 'nearestneighbor', 'linear', + 'trilinear', 'cubic', 'tricubic', 'quintic', 'triquintic', 'wsinc5', desc='specify a different interpolation method than might ' - 'be used for the warp', + 'be used for the warp', argstr='-ainterp %s', default='wsinc5') out_file = File( @@ -1557,7 +1572,7 @@ class NwarpApplyInputSpec(CommandLineInputSpec): name_source='in_file') short = traits.Bool( desc='Write output dataset using 16-bit short integers, rather than ' - 'the usual 32-bit floats.', + 'the usual 32-bit floats.', argstr='-short') quiet = traits.Bool( desc='don\'t be verbose :(', @@ -1731,18 +1746,19 @@ class OneDToolPyInputSpec(AFNIPythonCommandInputSpec): argstr='-write %s', xor=['show_cormat_warnings']) show_censor_count = traits.Bool( - desc='display the total number of censored TRs Note : if input is a valid xmat.1D dataset,' + desc='display the total number of censored TRs Note : if input is a valid xmat.1D dataset, ' 'then the count will come from the header. Otherwise the input is assumed to be a binary censor' - 'file, and zeros are simply counted.', + 'file, and zeros are simply counted.', argstr="-show_censor_count") censor_motion = traits.Tuple( - (traits.Float(),File()), + (traits.Float(), File()), desc='Tuple of motion limit and outfile prefix. 
need to also set set_nruns -r set_run_lengths', argstr="-censor_motion %f %s") censor_prev_TR = traits.Bool( desc='for each censored TR, also censor previous', argstr='-censor_prev_TR') - show_trs_uncensored = traits.Enum('comma','space','encoded','verbose', + show_trs_uncensored = traits.Enum( + 'comma', 'space', 'encoded', 'verbose', desc='display a list of TRs which were not censored in the specified style', argstr='-show_trs_uncensored %s') show_cormat_warnings = traits.File( @@ -1759,9 +1775,11 @@ class OneDToolPyInputSpec(AFNIPythonCommandInputSpec): desc="restrict -show_trs_[un]censored to the given 1-based run", argstr="-show_trs_run %d") + class OneDToolPyOutputSpec(AFNICommandOutputSpec): out_file = File(desc='output of 1D_tool.py') + class OneDToolPy(AFNIPythonCommand): """This program is meant to read/manipulate/write/diagnose 1D datasets. Input can be specified using AFNI sub-brick[]/time{} selectors. @@ -1786,13 +1804,14 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file']=os.path.join(os.getcwd(), self.inputs.out_file) + outputs['out_file'] = os.path.join(os.getcwd(), self.inputs.out_file) if isdefined(self.inputs.show_cormat_warnings): - outputs['out_file']=os.path.join(os.getcwd(), self.inputs.show_cormat_warnings) + outputs['out_file'] = os.path.join(os.getcwd(), self.inputs.show_cormat_warnings) if isdefined(self.inputs.censor_motion): - outputs['out_file']=os.path.join(os.getcwd(), self.inputs.censor_motion[1]) + outputs['out_file'] = os.path.join(os.getcwd(), self.inputs.censor_motion[1]) return outputs + class RefitInputSpec(CommandLineInputSpec): in_file = File( desc='input file to 3drefit', @@ -2023,9 +2042,10 @@ class TCat(AFNICommand): input_spec = TCatInputSpec output_spec = AFNICommandOutputSpec + class TCatSBInputSpec(AFNICommandInputSpec): in_files = traits.List( - traits.Tuple(File(exists=True),Str()), + traits.Tuple(File(exists=True), Str()), desc='List of tuples of file names and subbrick selectors as strings.' 'Don\'t forget to protect the single quotes in the subbrick selector' 'so the contents are protected from the command line interpreter.', @@ -2524,6 +2544,7 @@ def _run_interface(self, runtime): def _list_outputs(self): return {'out': getattr(self, '_gcor')} + class AxializeInputSpec(AFNICommandInputSpec): in_file = File( desc='input file to 3daxialize', @@ -2600,7 +2621,7 @@ class ZcatInputSpec(AFNICommandInputSpec): desc='output dataset prefix name (default \'zcat\')', argstr='-prefix %s') datum = traits.Enum( - 'byte','short','float', + 'byte', 'short', 'float', argstr='-datum %s', desc='specify data type for output. Valid types are \'byte\', ' '\'short\' and \'float\'.') @@ -2623,6 +2644,7 @@ class ZcatInputSpec(AFNICommandInputSpec): argstr='-nscale', xor=['fscale']) + class Zcat(AFNICommand): """Copies an image of one type to an image of the same or different type using 3dZcat command @@ -2646,6 +2668,7 @@ class Zcat(AFNICommand): input_spec = ZcatInputSpec output_spec = AFNICommandOutputSpec + class ZeropadInputSpec(AFNICommandInputSpec): in_files = File( desc='input dataset', @@ -2720,6 +2743,7 @@ class ZeropadInputSpec(AFNICommandInputSpec): xor=['I', 'S', 'A', 'P', 'L', 'R', 'z', 'RL', 'AP', 'IS', 'mm']) + class Zeropad(AFNICommand): """Adds planes of zeros to a dataset (i.e., pads it out). 
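Two mechanical rewrites recur throughout the AFNI hunks above and the ANTs hunks below: long "from __future__" imports are wrapped in parentheses instead of exceeding 79 columns (E501), and two blank lines are restored before top-level definitions (E302). A minimal sketch of both, under those assumptions; "Example" and "out_path" are hypothetical, not nipype code:

    from __future__ import (print_function, division, unicode_literals,
                            absolute_import)

    import os


    class Example(object):
        """Two blank lines above a top-level class satisfy E302."""

        def out_path(self, name):
            # Long calls are split after the opening parenthesis and the
            # continuation indented, matching the style applied in this patch.
            return os.path.join(
                os.getcwd(), name)


    if __name__ == '__main__':
        print(Example().out_path('report.txt'))

Parenthesized continuations are preferred over backslashes here because they survive trailing-whitespace cleanup (the other theme of this PR) without changing semantics.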
diff --git a/nipype/interfaces/ants/__init__.py b/nipype/interfaces/ants/__init__.py index e8096cc8e0..4e8e875e69 100644 --- a/nipype/interfaces/ants/__init__.py +++ b/nipype/interfaces/ants/__init__.py @@ -8,12 +8,14 @@ from .registration import ANTS, Registration, MeasureImageSimilarity # Resampling Programs -from .resampling import (ApplyTransforms, ApplyTransformsToPoints, WarpImageMultiTransform, - WarpTimeSeriesImageMultiTransform) +from .resampling import ( + ApplyTransforms, ApplyTransformsToPoints, WarpImageMultiTransform, + WarpTimeSeriesImageMultiTransform) # Segmentation Programs -from .segmentation import (Atropos, LaplacianThickness, N4BiasFieldCorrection, JointFusion, CorticalThickness, - BrainExtraction, DenoiseImage, AntsJointFusion) +from .segmentation import ( + Atropos, LaplacianThickness, N4BiasFieldCorrection, JointFusion, + CorticalThickness, BrainExtraction, DenoiseImage, AntsJointFusion) # Visualization Programs from .visualization import ConvertScalarImageToRGB, CreateTiledMosaic diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index 00553fd977..c7db5a8692 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -2,7 +2,8 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The ants module provides basic functions for interfacing with ANTS tools.""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str import os @@ -46,7 +47,8 @@ def parse_version(raw_info): v_string = v_string.split('-')[0] # 2.2.0-equivalent version string - if 'post' in v_string and LooseVersion(v_string) >= LooseVersion('2.1.0.post789'): + if 'post' in v_string and \ + LooseVersion(v_string) >= LooseVersion('2.1.0.post789'): return '2.2.0' else: return '.'.join(v_string.split('.')[:3]) @@ -98,7 +100,8 @@ def _num_threads_update(self): @staticmethod def _format_xarray(val): - """ Convenience method for converting input arrays [1,2,3] to commandline format '1x2x3' """ + """ Convenience method for converting input arrays [1,2,3] to + commandline format '1x2x3' """ return 'x'.join([str(x) for x in val]) @classmethod diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index a5fd0e63da..9f528b6144 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1031,6 +1031,7 @@ def _format_winsorize_image_intensities(self): self._quantilesDone = True return '--winsorize-image-intensities [ %s, %s ]' % (self.inputs.winsorize_lower_quantile, self.inputs.winsorize_upper_quantile) + def _get_initial_transform_filenames(self): n_transforms = len(self.inputs.initial_moving_transform) diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 3ed60a51b1..4c315c9407 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -7,7 +7,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, str import os diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 6c594b5e24..91e7dfe029 100644 --- 
a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -8,7 +8,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, str import os @@ -773,7 +774,7 @@ def _list_outputs(self): ) outputs['BrainExtractionInitialAffine'] = os.path.join( os.getcwd(), - self.inputs.out_prefix +'BrainExtractionInitialAffine.mat' + self.inputs.out_prefix + 'BrainExtractionInitialAffine.mat' ) outputs['BrainExtractionInitialAffineFixed'] = os.path.join( os.getcwd(), @@ -1025,44 +1026,47 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): 'specified, the program tries to infer the ' 'dimensionality from the input image.') target_image = traits.List(InputMultiPath(File(exists=True)), argstr='-t %s', - mandatory=True, desc='The target image (or ' - 'multimodal target images) assumed to be ' - 'aligned to a common image domain.') + mandatory=True, desc='The target image (or ' + 'multimodal target images) assumed to be ' + 'aligned to a common image domain.') atlas_image = traits.List(InputMultiPath(File(exists=True)), argstr="-g %s...", - mandatory=True, desc='The atlas image (or ' - 'multimodal atlas images) assumed to be ' - 'aligned to a common image domain.') - atlas_segmentation_image = InputMultiPath(File(exists=True), argstr="-l %s...", - mandatory=True, desc='The atlas segmentation ' - 'images. For performing label fusion the number ' - 'of specified segmentations should be identical ' - 'to the number of atlas image sets.') + mandatory=True, + desc='The atlas image (or ' + 'multimodal atlas images) assumed to be ' + 'aligned to a common image domain.') + atlas_segmentation_image = InputMultiPath( + File(exists=True), argstr="-l %s...", + mandatory=True, desc='The atlas segmentation ' + 'images. For performing label fusion the number ' + 'of specified segmentations should be identical ' + 'to the number of atlas image sets.') alpha = traits.Float(default_value=0.1, usedefault=True, argstr='-a %s', desc=('Regularization ' 'term added to matrix Mx for calculating the inverse. Default = 0.1')) beta = traits.Float(default_value=2.0, usedefault=True, argstr='-b %s', desc=('Exponent for mapping ' - 'intensity difference to the joint error. Default = 2.0')) - retain_label_posterior_images = traits.Bool(False, argstr='-r', usedefault=True, - requires=['atlas_segmentation_image'], - desc=('Retain label posterior probability images. Requires ' - 'atlas segmentations to be specified. Default = false')) + 'intensity difference to the joint error. Default = 2.0')) + retain_label_posterior_images = traits.Bool( + False, argstr='-r', usedefault=True, + requires=['atlas_segmentation_image'], + desc=('Retain label posterior probability images. Requires ' + 'atlas segmentations to be specified. Default = false')) retain_atlas_voting_images = traits.Bool(False, argstr='-f', usedefault=True, - desc=('Retain atlas voting images. Default = false')) + desc=('Retain atlas voting images. Default = false')) constrain_nonnegative = traits.Bool(False, argstr='-c', usedefault=True, - desc=('Constrain solution to non-negative weights.')) + desc=('Constrain solution to non-negative weights.')) patch_radius = traits.ListInt(minlen=3, maxlen=3, argstr='-p %s', desc=('Patch radius for similarity measures.' 
'Default: 2x2x2')) patch_metric = traits.Enum('PC', 'MSQ', argstr='-m %s', usedefault=False, - desc=('Metric to be used in determining the most similar ' - 'neighborhood patch. Options include Pearson\'s ' - 'correlation (PC) and mean squares (MSQ). Default = ' - 'PC (Pearson correlation).')) - search_radius = traits.List([3,3,3], minlen=1, maxlen=3, argstr='-s %s', usedefault=True, - desc=('Search radius for similarity measures. Default = 3x3x3. ' - 'One can also specify an image where the value at the ' - 'voxel specifies the isotropic search radius at that voxel.')) + desc=('Metric to be used in determining the most similar ' + 'neighborhood patch. Options include Pearson\'s ' + 'correlation (PC) and mean squares (MSQ). Default = ' + 'PC (Pearson correlation).')) + search_radius = traits.List([3, 3, 3], minlen=1, maxlen=3, argstr='-s %s', usedefault=True, + desc=('Search radius for similarity measures. Default = 3x3x3. ' + 'One can also specify an image where the value at the ' + 'voxel specifies the isotropic search radius at that voxel.')) exclusion_image_label = traits.List(traits.Str(), argstr='-e %s', requires=['exclusion_image'], - desc=('Specify a label for the exclusion region.')) + desc=('Specify a label for the exclusion region.')) exclusion_image = traits.List(File(exists=True), desc=('Specify an exclusion region for the given label.')) mask_image = File(argstr='-x %s', exists=True, desc='If a mask image ' @@ -1174,16 +1178,16 @@ def _format_arg(self, opt, spec, val): if isdefined(self.inputs.out_label_post_prob_name_format): if isdefined(self.inputs.out_atlas_voting_weight_name_format): retval = '-o [{0}, {1}, {2}, {3}]'.format(self.inputs.out_label_fusion, - self.inputs.out_intensity_fusion_name_format, - self.inputs.out_label_post_prob_name_format, - self.inputs.out_atlas_voting_weight_name_format) + self.inputs.out_intensity_fusion_name_format, + self.inputs.out_label_post_prob_name_format, + self.inputs.out_atlas_voting_weight_name_format) else: retval = '-o [{0}, {1}, {2}]'.format(self.inputs.out_label_fusion, - self.inputs.out_intensity_fusion_name_format, - self.inputs.out_label_post_prob_name_format) + self.inputs.out_intensity_fusion_name_format, + self.inputs.out_label_post_prob_name_format) else: retval = '-o [{0}, {1}]'.format(self.inputs.out_label_fusion, - self.inputs.out_intensity_fusion_name_format) + self.inputs.out_intensity_fusion_name_format) else: retval = '-o {0}'.format(self.inputs.out_label_fusion) elif opt == 'out_intensity_fusion_name_format': @@ -1193,13 +1197,13 @@ def _format_arg(self, opt, spec, val): elif opt == 'atlas_image': atlas_image_cmd = " ".join( ['-g [{0}]'.format(", ".join("'%s'" % fn for fn in ai)) - for ai in self.inputs.atlas_image] + for ai in self.inputs.atlas_image] ) retval = atlas_image_cmd elif opt == 'target_image': target_image_cmd = " ".join( ['-t [{0}]'.format(", ".join("'%s'" % fn for fn in ai)) - for ai in self.inputs.target_image] + for ai in self.inputs.target_image] ) retval = target_image_cmd elif opt == 'atlas_segmentation_image': @@ -1255,8 +1259,8 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): white_matter_prob_image = File(exists=True, argstr='--white-matter-probability-image "%s"', desc="In addition to the segmentation image, a white matter probability image can be\n" - "used. If no such image is supplied, one is created using the segmentation image\n" - "and a variance of 1.0 mm.") + "used. 
If no such image is supplied, one is created using the segmentation image\n" + "and a variance of 1.0 mm.") convergence = traits.Str(default="[50,0.001,10]", argstr='--convergence "%s"', usedefault=True, desc="Convergence is determined by fitting a line to the normalized energy profile of\n" diff --git a/nipype/interfaces/ants/tests/test_resampling.py b/nipype/interfaces/ants/tests/test_resampling.py index 509ebfe844..b398e95dfd 100644 --- a/nipype/interfaces/ants/tests/test_resampling.py +++ b/nipype/interfaces/ants/tests/test_resampling.py @@ -8,76 +8,77 @@ @pytest.fixture() def change_dir(request): - orig_dir = os.getcwd() - filepath = os.path.dirname( os.path.realpath( __file__ ) ) - datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) - os.chdir(datadir) + orig_dir = os.getcwd() + filepath = os.path.dirname(os.path.realpath(__file__)) + datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) + os.chdir(datadir) - def move2orig(): - os.chdir(orig_dir) + def move2orig(): + os.chdir(orig_dir) - request.addfinalizer(move2orig) + request.addfinalizer(move2orig) @pytest.fixture() def create_wimt(): - wimt = WarpImageMultiTransform() - wimt.inputs.input_image = 'diffusion_weighted.nii' - wimt.inputs.reference_image = 'functional.nii' - wimt.inputs.transformation_series = ['func2anat_coreg_Affine.txt','func2anat_InverseWarp.nii.gz', \ - 'dwi2anat_Warp.nii.gz','dwi2anat_coreg_Affine.txt'] - return wimt + wimt = WarpImageMultiTransform() + wimt.inputs.input_image = 'diffusion_weighted.nii' + wimt.inputs.reference_image = 'functional.nii' + wimt.inputs.transformation_series = ['func2anat_coreg_Affine.txt', 'func2anat_InverseWarp.nii.gz', + 'dwi2anat_Warp.nii.gz', 'dwi2anat_coreg_Affine.txt'] + return wimt + def test_WarpImageMultiTransform(change_dir, create_wimt): - wimt = create_wimt - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ + wimt = create_wimt + assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' def test_WarpImageMultiTransform_invaffine_1(change_dir, create_wimt): - wimt = create_wimt - wimt.inputs.invert_affine = [1] - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ + wimt = create_wimt + wimt.inputs.invert_affine = [1] + assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ -i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' def test_WarpImageMultiTransform_invaffine_2(change_dir, create_wimt): - wimt = create_wimt - wimt.inputs.invert_affine = [2] - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt' + wimt = create_wimt + wimt.inputs.invert_affine = [2] + assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt' def test_WarpImageMultiTransform_invaffine_wrong(change_dir, create_wimt): - wimt = create_wimt - wimt.inputs.invert_affine = [3] - with 
pytest.raises(Exception): - assert wimt.cmdline + wimt = create_wimt + wimt.inputs.invert_affine = [3] + with pytest.raises(Exception): + assert wimt.cmdline @pytest.fixture() def create_wtsimt(): - wtsimt = WarpTimeSeriesImageMultiTransform() - wtsimt.inputs.input_image = 'resting.nii' - wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' - wtsimt.inputs.transformation_series = ['ants_Warp.nii.gz','ants_Affine.txt'] - return wtsimt + wtsimt = WarpTimeSeriesImageMultiTransform() + wtsimt.inputs.input_image = 'resting.nii' + wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' + wtsimt.inputs.transformation_series = ['ants_Warp.nii.gz', 'ants_Affine.txt'] + return wtsimt def test_WarpTimeSeriesImageMultiTransform(change_dir, create_wtsimt): - wtsimt = create_wtsimt - assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ + wtsimt = create_wtsimt + assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ -R ants_deformed.nii.gz ants_Warp.nii.gz ants_Affine.txt' def test_WarpTimeSeriesImageMultiTransform_invaffine(change_dir, create_wtsimt): - wtsimt = create_wtsimt - wtsimt.inputs.invert_affine = [1] - assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ + wtsimt = create_wtsimt + wtsimt.inputs.invert_affine = [1] + assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ -R ants_deformed.nii.gz ants_Warp.nii.gz -i ants_Affine.txt' def test_WarpTimeSeriesImageMultiTransform_invaffine_wrong(change_dir, create_wtsimt): - wtsimt = create_wtsimt - wtsimt.inputs.invert_affine = [0] - with pytest.raises(Exception): - wtsimt.cmdline + wtsimt = create_wtsimt + wtsimt.inputs.invert_affine = [0] + with pytest.raises(Exception): + wtsimt.cmdline diff --git a/nipype/interfaces/ants/tests/test_spec_JointFusion.py b/nipype/interfaces/ants/tests/test_spec_JointFusion.py index 676631f08e..1c7d3ae6ff 100644 --- a/nipype/interfaces/ants/tests/test_spec_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_spec_JointFusion.py @@ -7,6 +7,7 @@ from nipype.interfaces.ants import JointFusion import pytest + def test_JointFusion_dimension(): at = JointFusion() set_dimension = lambda d: setattr(at.inputs, 'dimension', int(d)) @@ -17,13 +18,16 @@ def test_JointFusion_dimension(): with pytest.raises(TraitError): set_dimension(d) + @pytest.mark.parametrize("m", range(1, 5)) def test_JointFusion_modalities(m): at = JointFusion() setattr(at.inputs, 'modalities', int(m)) assert at.inputs.modalities == int(m) -@pytest.mark.parametrize("a, b", [(a,b) for a in range(10) for b in range(10)]) + +@pytest.mark.parametrize( + "a, b", [(a, b) for a in range(10) for b in range(10)]) def test_JointFusion_method(a, b): at = JointFusion() set_method = lambda a, b: setattr(at.inputs, 'method', 'Joint[%.1f,%d]'.format(a, b)) @@ -38,11 +42,15 @@ def test_JointFusion_method(a, b): # set with alpha/beta assert at.inputs.method == 'Joint[%.1f,%d]'.format(aprime, bprime) -@pytest.mark.parametrize("attr, x", [(attr, x) for attr in ['patch_radius', 'search_radius'] for x in range(5)]) + +@pytest.mark.parametrize( + "attr, x", [(attr, x) for attr in ['patch_radius', 'search_radius'] + for x in range(5)]) def test_JointFusion_radius(attr, x): at = JointFusion() setattr(at.inputs, attr, [x, x+1, x**x]) - assert at._format_arg(attr, None, getattr(at.inputs, attr))[4:] == '{0}x{1}x{2}'.format(x, x + 1, x**x) + assert at._format_arg(attr, None, getattr( + at.inputs, attr))[4:] == 
'{0}x{1}x{2}'.format(x, x + 1, x**x) def test_JointFusion_cmd(): @@ -63,12 +71,14 @@ def test_JointFusion_cmd(): at.inputs.search_radius = [1, 2, 3] expected_command = ('jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3' ' -tg %s -g %s -g %s -l %s -l %s' - ' fusion_labelimage_output.nii') % (T1_image, - warped_intensity_images[0], - warped_intensity_images[1], - segmentation_images[0], - segmentation_images[1]) + ' fusion_labelimage_output.nii') % ( + T1_image, + warped_intensity_images[0], + warped_intensity_images[1], + segmentation_images[0], + segmentation_images[1]) assert at.cmdline == expected_command # setting intensity or labels with unequal lengths raises error with pytest.raises(AssertionError): - at._format_arg('warped_intensity_images', InputMultiPath, warped_intensity_images + [example_data('im3.nii')]) + at._format_arg('warped_intensity_images', InputMultiPath, + warped_intensity_images + [example_data('im3.nii')]) diff --git a/nipype/interfaces/ants/utils.py b/nipype/interfaces/ants/utils.py index 0ba918ee27..5c769b2c14 100644 --- a/nipype/interfaces/ants/utils.py +++ b/nipype/interfaces/ants/utils.py @@ -7,7 +7,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 07cf8af086..473a0eb89f 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -6,7 +6,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index bcf2656620..bd2a105e82 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -12,7 +12,8 @@ interfaces are found in the ``specs`` module. 
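# NOTE: the expected_command rewrap in test_spec_JointFusion above leans on
# two Python behaviours worth naming: adjacent string literals concatenate
# at compile time, and a single %-interpolation then fills the joined
# template from one parenthesised tuple.  A minimal sketch (file names
# illustrative only):
expected = ('jointfusion 3 1 -m Joint[0.1,2]'
            ' -tg %s -g %s') % ('T1.nii', 'im1.nii')
assert expected == 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii'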
""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object, open, str, bytes @@ -307,7 +308,7 @@ def _outputs_help(cls): """ helpstr = ['Outputs::', ''] if cls.output_spec: - outputs = cls.output_spec() # pylint: disable=E1102 + outputs = cls.output_spec() for name, spec in sorted(outputs.traits(transient=None).items()): helpstr += cls._get_trait_desc(outputs, name, spec) if len(helpstr) == 2: @@ -319,7 +320,7 @@ def _outputs(self): """ outputs = None if self.output_spec: - outputs = self.output_spec() # pylint: disable=E1102 + outputs = self.output_spec() return outputs @@ -1080,7 +1081,7 @@ def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) if traits: - outputs = self.output_spec().get() # pylint: disable=E1102 + outputs = self.output_spec().get() for name, trait_spec in list(traits.items()): out_name = name if trait_spec.output_name is not None: @@ -1184,7 +1185,7 @@ class SEMLikeCommandLine(CommandLine): """ def _list_outputs(self): - outputs = self.output_spec().get() # pylint: disable=E1102 + outputs = self.output_spec().get() return self._outputs_from_inputs(outputs) def _outputs_from_inputs(self, outputs): diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index f0d1184d85..b30956c28b 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -9,7 +9,8 @@ Define the API for the I/O of interfaces """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from copy import deepcopy diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py index f047cd120f..3381b6157e 100644 --- a/nipype/interfaces/base/support.py +++ b/nipype/interfaces/base/support.py @@ -7,7 +7,8 @@ ...................................................... 
""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, object, str import os diff --git a/nipype/interfaces/base/tests/test_core.py b/nipype/interfaces/base/tests/test_core.py index 1eb2cf4b42..aee2649206 100644 --- a/nipype/interfaces/base/tests/test_core.py +++ b/nipype/interfaces/base/tests/test_core.py @@ -161,7 +161,8 @@ def __init__(self, **inputs): # test get hashval in a complex interface from nipype.interfaces.ants import Registration - settings = example_data(example_data('smri_ants_registration_settings.json')) + settings = example_data( + example_data('smri_ants_registration_settings.json')) with open(settings) as setf: data_dict = json.load(setf) diff --git a/nipype/interfaces/base/tests/test_resource_monitor.py b/nipype/interfaces/base/tests/test_resource_monitor.py index 88e71921c4..93118d1ee2 100644 --- a/nipype/interfaces/base/tests/test_resource_monitor.py +++ b/nipype/interfaces/base/tests/test_resource_monitor.py @@ -6,7 +6,8 @@ Module to unit test the resource_monitor in nipype """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import pytest diff --git a/nipype/interfaces/base/tests/test_specs.py b/nipype/interfaces/base/tests/test_specs.py index 168e021339..9568da92ed 100644 --- a/nipype/interfaces/base/tests/test_specs.py +++ b/nipype/interfaces/base/tests/test_specs.py @@ -41,7 +41,8 @@ class spec(nib.TraitedSpec): with pytest.raises(nib.traits.TraitError): specfunc(1) infields = spec(foo=1) - hashval = ([('foo', 1), ('goo', '0.0000000000')], 'e89433b8c9141aa0fda2f8f4d662c047') + hashval = ([('foo', 1), ('goo', '0.0000000000')], + 'e89433b8c9141aa0fda2f8f4d662c047') assert infields.get_hashval() == hashval assert infields.__repr__() == '\nfoo = 1\ngoo = 0.0\n' @@ -131,7 +132,8 @@ class DeprecationSpec3(nib.TraitedSpec): except nib.TraitError: not_raised = False assert not_raised - assert len(w) == 1, 'deprecated warning 1 %s' % [w1.message for w1 in w] + assert len(w) == 1, 'deprecated warning 1 %s' % [ + w1.message for w1 in w] with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) @@ -148,7 +150,8 @@ class DeprecationSpec3(nib.TraitedSpec): assert not_raised assert spec_instance.foo == Undefined assert spec_instance.bar == 1 - assert len(w) == 1, 'deprecated warning 2 %s' % [w1.message for w1 in w] + assert len(w) == 1, 'deprecated warning 2 %s' % [ + w1.message for w1 in w] def test_namesource(setup_file): diff --git a/nipype/interfaces/base/traits_extension.py b/nipype/interfaces/base/traits_extension.py index 18bdd003c2..76341612e6 100644 --- a/nipype/interfaces/base/traits_extension.py +++ b/nipype/interfaces/base/traits_extension.py @@ -20,7 +20,8 @@ (usually by Robert Kern). 
""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, bytes import os @@ -200,8 +201,8 @@ def validate(self, object, name, value): else: raise TraitError( args='The trait \'{}\' of {} instance is {}, but the path ' - ' \'{}\' does not exist.'.format(name, - class_of(object), self.info_text, value)) + ' \'{}\' does not exist.'.format( + name, class_of(object), self.info_text, value)) self.error(object, name, value) @@ -284,12 +285,12 @@ def grab_exts(self): else [y[0]] for y in img_fmt_types[fmt]], [])) if self.allow_compressed: exts.extend(sum([[u for u in y[-1]] if isinstance(y[-1], - tuple) else [y[-1]] for y in img_fmt_types[fmt]], [])) + tuple) else [y[-1]] for y in img_fmt_types[fmt]], [])) else: raise AttributeError('Information has not been added for format' ' type {} yet. Supported formats include: ' '{}'.format(fmt, - ', '.join(img_fmt_types.keys()))) + ', '.join(img_fmt_types.keys()))) return list(set(exts)) def validate(self, object, name, value): diff --git a/nipype/interfaces/bids_utils.py b/nipype/interfaces/bids_utils.py index 0259a80352..ba6ca3e2c8 100644 --- a/nipype/interfaces/bids_utils.py +++ b/nipype/interfaces/bids_utils.py @@ -98,7 +98,7 @@ def __init__(self, infields=None, **kwargs): "anat": {"modality": "anat"}} # If infields is empty, use all BIDS entities - if not infields is None and have_pybids: + if infields is not None and have_pybids: bids_config = join(dirname(gb.__file__), 'config', 'bids.json') bids_config = json.load(open(bids_config, 'r')) infields = [i['name'] for i in bids_config['entities']] diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py index 60141bcb00..1ef89b4b44 100644 --- a/nipype/interfaces/brainsuite/brainsuite.py +++ b/nipype/interfaces/brainsuite/brainsuite.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import re as regex @@ -13,6 +14,7 @@ Author: Jason Wong """ + class BseInputSpec(CommandLineInputSpec): inputMRIFile = File( @@ -1621,4 +1623,3 @@ def l_outputs(self): outputs[key] = name return outputs - diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py index 579b5229b9..faf9b14381 100644 --- a/nipype/interfaces/bru2nii.py +++ b/nipype/interfaces/bru2nii.py @@ -8,7 +8,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from .base import (CommandLine, CommandLineInputSpec, @@ -17,15 +18,20 @@ class Bru2InputSpec(CommandLineInputSpec): input_dir = Directory( - desc="Input Directory", exists=True, mandatory=True, position=-1, argstr="%s") + desc="Input Directory", exists=True, mandatory=True, position=-1, + argstr="%s") actual_size = traits.Bool( - argstr='-a', desc="Keep actual size - otherwise x10 scale so animals match human.") + argstr='-a', + desc="Keep actual size - otherwise x10 scale so animals match human.") force_conversion = traits.Bool( - argstr='-f', desc="Force conversion of localizers images (multiple slice orientations).") + argstr='-f', + desc="Force conversion of localizers images (multiple slice " + 
"orientations).") append_protocol_name = traits.Bool( argstr='-p', desc="Append protocol name to output filename.") output_filename = traits.Str( - argstr="-o %s", desc="Output filename ('.nii' will be appended)", genfile=True) + argstr="-o %s", desc="Output filename ('.nii' will be appended)", + genfile=True) class Bru2OutputSpec(TraitedSpec): @@ -55,11 +61,12 @@ def _list_outputs(self): output_filename1 = os.path.abspath(self.inputs.output_filename) else: output_filename1 = self._gen_filename('output_filename') - outputs["nii_file"] = output_filename1+".nii" + outputs["nii_file"] = output_filename1 + ".nii" return outputs def _gen_filename(self, name): if name == 'output_filename': outfile = os.path.join( - os.getcwd(), os.path.basename(os.path.normpath(self.inputs.input_dir))) + os.getcwd(), os.path.basename(os.path.normpath( + self.inputs.input_dir))) return outfile diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index 334500874c..1e24c06c8a 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- -"""The ants module provides basic functions for interfacing with ants functions. +"""The ants module provides basic functions for interfacing with ants + functions. Change directory to provide relative paths for doctests >>> import os @@ -7,7 +8,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from .base import (CommandLineInputSpec, traits, TraitedSpec, File, SEMLikeCommandLine) diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index c4c07fa71d..a3c35eee4e 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 654d71b1f1..cc508a42b5 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index 6cf8f4f253..e6d12b07d7 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import glob diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 5aba0560c1..b2019b9791 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py 
index 5f16726dfc..1fa4e993f8 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/camino/utils.py b/nipype/interfaces/camino/utils.py index 0cb07d89f5..ae0b3f0efd 100644 --- a/nipype/interfaces/camino/utils.py +++ b/nipype/interfaces/camino/utils.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from ..base import (traits, TraitedSpec, File, CommandLine, diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index b9032ba1cd..2a91b937b1 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 2f29bbb2e2..f42a82c021 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, open import pickle @@ -146,7 +147,6 @@ def create_endpoints_array(fib, voxelSize): n = len(fib) endpoints = np.zeros((n, 2, 3)) endpointsmm = np.zeros((n, 2, 3)) - pc = -1 # Computation for each fiber for i, fi in enumerate(fib): @@ -756,7 +756,6 @@ def create_nodes(roi_file, resolution_network_file, out_filename): gp = nx.read_graphml(resolution_network_file) roi_image = nb.load(roi_file, mmap=NUMPY_MMAP) roiData = roi_image.get_data() - nROIs = len(gp.nodes()) for u, d in gp.nodes(data=True): G.add_node(int(u), **d) xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1)) diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index 7cc9af1000..32bead5c6c 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import os.path as op diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index 7410227565..1fae53244d 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index ec3c01336c..72babf1dcb 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import 
print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, open, range import os.path as op diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 22214c0036..3b042106e4 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range import os diff --git a/nipype/interfaces/cmtk/tests/test_nbs.py b/nipype/interfaces/cmtk/tests/test_nbs.py index 03a7aa8619..fcfaa0865b 100644 --- a/nipype/interfaces/cmtk/tests/test_nbs.py +++ b/nipype/interfaces/cmtk/tests/test_nbs.py @@ -11,12 +11,13 @@ except Exception as e: have_cv = False + @pytest.fixture() def creating_graphs(tmpdir): graphlist = [] graphnames = ["name"+str(i) for i in range(6)] for idx, name in enumerate(graphnames): - graph = np.random.rand(10,10) + graph = np.random.rand(10, 10) G = nx.from_numpy_matrix(graph) out_file = tmpdir.strpath + graphnames[idx] + '.pck' # Save as pck file @@ -25,7 +26,8 @@ def creating_graphs(tmpdir): return graphlist -@pytest.mark.skipif(have_cv, reason="tests for import error, cviewer available") +@pytest.mark.skipif( + have_cv, reason="tests for import error, cviewer available") def test_importerror(creating_graphs, tmpdir): tmpdir.chdir() graphlist = creating_graphs @@ -44,7 +46,7 @@ def test_importerror(creating_graphs, tmpdir): @pytest.mark.skipif(not have_cv, reason="cviewer has to be available") def test_keyerror(creating_graphs): - graphlist =creating_graphs + graphlist = creating_graphs group1 = graphlist[:3] group2 = graphlist[3:] diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py index 22e9375609..583d656afc 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -7,7 +7,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, open import os import re @@ -171,7 +172,7 @@ def _parse_stdout(self, stdout): out_file = val if out_file: - if not out_file in files: + if out_file not in files: files.append(out_file) last_added_file = out_file continue @@ -225,7 +226,7 @@ class Dcm2niixInputSpec(CommandLineInputSpec): desc="Output directory") bids_format = traits.Bool(True, argstr='-b', usedefault=True, desc="Create a BIDS sidecar file") - compress = traits.Enum('i', ['y','i','n'], argstr='-z %s', usedefault=True, + compress = traits.Enum('i', ['y', 'i', 'n'], argstr='-z %s', usedefault=True, desc="Gzip compress images - [y=pigz, i=internal, n=no]") merge_imgs = traits.Bool(False, argstr='-m', usedefault=True, desc="merge 2D slices from same series") @@ -289,7 +290,7 @@ def _run_interface(self, runtime): (self.output_files, self.bvecs, self.bvals, self.bids) = self._parse_stdout(new_runtime.stdout) else: - (self.output_files, self.bvecs, + (self.output_files, self.bvecs, self.bvals) = self._parse_stdout(new_runtime.stdout) return new_runtime @@ -303,7 +304,7 @@ def _parse_stdout(self, stdout): for line in stdout.split("\n"): if not skip: out_file = None - if line.startswith("Convert "): # 
output + if line.startswith("Convert "): # output fname = str(re.search('\S+/\S+', line).group(0)) if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index 9e49f2b326..a908b5fe69 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -8,7 +8,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from os import path as op @@ -52,7 +53,8 @@ class NiftiGeneratorBaseInputSpec(TraitedSpec): "meta data to create the output filename(s)") out_ext = traits.Str('.nii.gz', usedefault=True, desc="Determines output file type") - out_path = Directory(desc='output path, current working directory if not set') + out_path = Directory( + desc='output path, current working directory if not set') class NiftiGeneratorBase(BaseInterface): @@ -153,7 +155,8 @@ def _run_interface(self, runtime): stack = dcmstack.DicomStack(meta_filter=meta_filter) for src_path in src_paths: if not imghdr.what(src_path) == "gif": - src_dcm = dicom.read_file(src_path, force=self.inputs.force_read) + src_dcm = dicom.read_file( + src_path, force=self.inputs.force_read) stack.add_dcm(src_dcm) nii = stack.to_nifti(embed_meta=True) nw = NiftiWrapper(nii) @@ -210,8 +213,9 @@ class LookupMetaInputSpec(TraitedSpec): traits.Dict(), mandatory=True, desc=("List of meta data keys to lookup, or a " - "dict where keys specify the meta data keys to " - "lookup and the values specify the output names") + "dict where keys specify the meta data " + "keys to lookup and the values specify " + "the output names") ) @@ -313,7 +317,8 @@ def _run_interface(self, runtime): src_dict = src.meta_ext.get_class_dict(cls) dest_dict = dest.meta_ext.get_class_dict(cls) dest_dict.update(src_dict) - # Update the shape and slice dimension to reflect the meta extension update. + # Update the shape and slice dimension to reflect the meta extension + # update. dest.meta_ext.slice_dim = src.meta_ext.slice_dim dest.meta_ext.shape = src.meta_ext.shape diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index 0e3c5a400a..0dfa26afd5 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -13,7 +13,8 @@ See the docstrings for the individual classes for 'working' examples. 
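# NOTE: Dcm2niix._parse_stdout above recovers the written path from each
# "Convert" line with re.search('\S+/\S+', line).  Standalone, against an
# illustrative log line (the real dcm2niix output may differ slightly):
import re

line = 'Convert 160 DICOM as /tmp/out/anat_001 (64x64x160x1)'
match = re.search(r'\S+/\S+', line)
assert match is not None and match.group(0) == '/tmp/out/anat_001'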
""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object import re from ..base import CommandLine diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 3ba0beeafc..c7db41daab 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, open import os @@ -22,6 +23,7 @@ __docformat__ = 'restructuredtext' + class DTIReconInputSpec(CommandLineInputSpec): DWI = File(desc='Input diffusion volume', argstr='%s', exists=True, mandatory=True, position=1) out_prefix = traits.Str("dti", desc='Output file prefix', argstr='%s', usedefault=True, position=2) diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index ee7933f64c..8cd3cb8dfd 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range import os @@ -22,6 +23,7 @@ __docformat__ = 'restructuredtext' + class HARDIMatInputSpec(CommandLineInputSpec): bvecs = File(exists=True, desc='b vectors file', argstr='%s', position=1, mandatory=True) diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 4a14d96b90..a60ca415d5 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from ..base import (TraitedSpec, File, traits, CommandLine, @@ -18,10 +19,17 @@ __docformat__ = 'restructuredtext' + class SplineFilterInputSpec(CommandLineInputSpec): - track_file = File(exists=True, desc="file containing tracks to be filtered", position=0, argstr="%s", mandatory=True) - step_length = traits.Float(desc="in the unit of minimum voxel size", position=1, argstr="%f", mandatory=True) - output_file = File("spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", usedefault=True) + track_file = File( + exists=True, desc="file containing tracks to be filtered", position=0, + argstr="%s", mandatory=True) + step_length = traits.Float( + desc="in the unit of minimum voxel size", position=1, argstr="%f", + mandatory=True) + output_file = File( + "spline_tracks.trk", desc="target file for smoothed tracks", + position=2, argstr="%s", usedefault=True) class SplineFilterOutputSpec(TraitedSpec): @@ -53,13 +61,18 @@ class SplineFilter(CommandLine): def _list_outputs(self): outputs = self.output_spec().get() - outputs['smoothed_track_file'] = os.path.abspath(self.inputs.output_file) + outputs['smoothed_track_file'] = os.path.abspath( + self.inputs.output_file) return outputs class TrackMergeInputSpec(CommandLineInputSpec): - track_files = InputMultiPath(File(exists=True), desc="file containing tracks to be 
filtered", position=0, argstr="%s...", mandatory=True) - output_file = File("merged_tracks.trk", desc="target file for merged tracks", position=-1, argstr="%s", usedefault=True) + track_files = InputMultiPath( + File(exists=True), desc="file containing tracks to be filtered", + position=0, argstr="%s...", mandatory=True) + output_file = File( + "merged_tracks.trk", desc="target file for merged tracks", position=-1, + argstr="%s", usedefault=True) class TrackMergeOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/dipy/anisotropic_power.py b/nipype/interfaces/dipy/anisotropic_power.py index 2a678dfd1f..58613a5df5 100644 --- a/nipype/interfaces/dipy/anisotropic_power.py +++ b/nipype/interfaces/dipy/anisotropic_power.py @@ -6,7 +6,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import nibabel as nb @@ -58,7 +59,7 @@ def _run_interface(self, runtime): mask = nb.load(self.inputs.mask_file).get_data() # Fit it - model = shm.QballModel(gtab,8) + model = shm.QballModel(gtab, 8) sphere = get_sphere('symmetric724') peaks = peaks_from_model(model=model, data=data, relative_peak_threshold=.5, diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index d5b2e9957f..e878a70898 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- """ Base interfaces for dipy """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op import numpy as np diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index bfe197cae2..ee4a733205 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -7,7 +7,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op import nibabel as nb diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index d10e51dede..2a2f61a8bb 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -3,7 +3,8 @@ Interfaces to the reconstruction algorithms in dipy """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from future import standard_library standard_library.install_aliases() from builtins import str, open @@ -123,9 +124,9 @@ def _run_interface(self, runtime): sigma = mean_std * (1 + bias) if sigma == 0: - IFLOGGER.warn('Noise std is 0.0, looks like data was masked and noise ' - 'cannot be estimated correctly. Using default tensor ' - 'model instead of RESTORE.') + IFLOGGER.warn('Noise std is 0.0, looks like data was masked and ' + 'noise cannot be estimated correctly. 
Using default ' + 'tensor model instead of RESTORE.') dti = TensorModel(gtab) else: IFLOGGER.info('Performing RESTORE with noise std=%.4f.', sigma) @@ -255,7 +256,8 @@ def _run_interface(self, runtime): 'Ratio=%0.3f.', ratio) elif ratio < 1.e-5 or np.any(np.isnan(response)): response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) - IFLOGGER.warn('Estimated response is not valid, using a default one') + IFLOGGER.warn( + 'Estimated response is not valid, using a default one') else: IFLOGGER.info('Estimated response: %s', str(response[:3])) @@ -324,7 +326,6 @@ def _run_interface(self, runtime): img = nb.load(self.inputs.in_file) imref = nb.four_to_three(img)[0] - affine = img.affine if isdefined(self.inputs.in_mask): msk = nb.load(self.inputs.in_mask).get_data() @@ -332,7 +333,6 @@ def _run_interface(self, runtime): msk = np.ones(imref.shape) data = img.get_data().astype(np.float32) - hdr = imref.header.copy() gtab = self._get_gradient_table() resp_file = np.loadtxt(self.inputs.response) diff --git a/nipype/interfaces/dipy/setup.py b/nipype/interfaces/dipy/setup.py index 43dfe1156c..3465b32745 100644 --- a/nipype/interfaces/dipy/setup.py +++ b/nipype/interfaces/dipy/setup.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) def configuration(parent_package='', top_path=None): diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py index f008948c97..1a8b348be6 100644 --- a/nipype/interfaces/dipy/simulate.py +++ b/nipype/interfaces/dipy/simulate.py @@ -5,7 +5,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from multiprocessing import (Pool, cpu_count) import os.path as op from builtins import range @@ -120,7 +121,8 @@ def _run_interface(self, runtime): # Volume fractions of isotropic compartments nballs = len(self.inputs.in_vfms) vfs = np.squeeze(nb.concat_images( - [nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_vfms]).get_data()) + [nb.load(f, mmap=NUMPY_MMAP) + for f in self.inputs.in_vfms]).get_data()) if nballs == 1: vfs = vfs[..., np.newaxis] total_vf = np.sum(vfs, axis=3) @@ -138,7 +140,8 @@ def _run_interface(self, runtime): nvox = len(msk[msk > 0]) # Fiber fractions - ffsim = nb.concat_images([nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_frac]) + ffsim = nb.concat_images([nb.load(f, mmap=NUMPY_MMAP) + for f in self.inputs.in_frac]) ffs = np.nan_to_num(np.squeeze(ffsim.get_data())) # fiber fractions ffs = np.clip(ffs, 0., 1.) 
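# NOTE: the surrounding context lines sanitise the fiber-fraction volume:
# squeeze the 4D stack, zero any NaNs, then clamp to [0, 1].  The same
# normalisation on toy data:
import numpy as np

_ffs = np.clip(np.nan_to_num(np.array([np.nan, -0.2, 0.4, 1.7])), 0., 1.)
assert _ffs.tolist() == [0.0, 0.0, 0.4, 1.0]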
if nsticks == 1: @@ -228,8 +231,9 @@ def _run_interface(self, runtime): pool = Pool(processes=n_proc) # Simulate sticks using dipy - IFLOGGER.info('Starting simulation of %d voxels, %d diffusion directions.', - len(args), ndirs) + IFLOGGER.info( + 'Starting simulation of %d voxels, %d diffusion directions.', + len(args), ndirs) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: raise RuntimeError(('Computed directions do not match number' @@ -287,7 +291,7 @@ def _compute_voxel(args): signal, _ = multi_tensor( gtab, args['mevals'], S0=args['S0'], angles=args['sticks'], fractions=ffs, snr=snr) - except Exception as e: + except Exception: pass return signal.tolist() diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index e5518f4ea0..e694fc05fa 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -5,7 +5,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import nibabel as nb @@ -67,9 +68,9 @@ def _run_interface(self, runtime): nb.save(img, out_file) IFLOGGER.info('DTI parameters image saved as %s', out_file) - #FA MD RD and AD + # FA MD RD and AD for metric in ["fa", "md", "rd", "ad"]: - data = getattr(ten_fit,metric).astype("float32") + data = getattr(ten_fit, metric).astype("float32") out_name = self._gen_filename(metric) nb.Nifti1Image(data, affine).to_filename(out_name) IFLOGGER.info('DTI %s image saved as %s', metric, out_name) diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index bd52fe937e..47ce10bd9f 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -6,7 +6,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op import numpy as np @@ -268,7 +269,8 @@ def _run_interface(self, runtime): ss_mm = [np.array(s) for s in eu] trkfilev = nb.trackvis.TrackvisFile( - [(s, None, None) for s in ss_mm], points_space='rasmm', affine=np.eye(4)) + [(s, None, None) for s in ss_mm], points_space='rasmm', + affine=np.eye(4)) trkfilev.to_file(self._gen_filename('tracked', ext='.trk')) return runtime diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 4d1df1e136..e3a119f379 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import warnings diff --git a/nipype/interfaces/elastix/base.py b/nipype/interfaces/elastix/base.py index 746e571f3f..3b2efbaf96 100644 --- a/nipype/interfaces/elastix/base.py +++ b/nipype/interfaces/elastix/base.py @@ -10,7 +10,8 @@ """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ... 
import logging from ..base import CommandLineInputSpec, Directory, traits diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index 5038447465..5d8281cbed 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -8,7 +8,8 @@ displacement maps to images and points. """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import os.path as op @@ -70,7 +71,6 @@ def _list_outputs(self): out_dir = op.abspath(self.inputs.output_path) - opts = ['WriteResultImage', 'ResultImageFormat'] regex = re.compile(r'^\((\w+)\s(.+)\)$') outputs['transform'] = [] diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index 718f5310fd..486933c25c 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -8,7 +8,8 @@ transform files (to configure warpings) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import os.path as op diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index 56a4d5df77..2c34d63d9a 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -14,7 +14,8 @@ See the docstrings for the individual classes for 'working' examples. """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open, object, str import os @@ -179,10 +180,10 @@ def version(self): class FSSurfaceCommand(FSCommand): """Support for FreeSurfer surface-related functions. - For some functions, if the output file is not specified starting with - 'lh.' or 'rh.', FreeSurfer prepends the prefix from the input file to the - output filename. Output out_file must be adjusted to accommodate this. - By including the full path in the filename, we can also avoid this behavior. + For some functions, if the output file is not specified starting with 'lh.' + or 'rh.', FreeSurfer prepends the prefix from the input file to the output + filename. Output out_file must be adjusted to accommodate this. By + including the full path in the filename, we can also avoid this behavior. 
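# NOTE: the prefix behaviour this docstring describes is also why the
# MRISPreprocReconAll hunk below trims names with
# basename.lstrip('rh.').lstrip('lh.').  str.lstrip removes a leading
# character set, not a literal prefix, so it is safe for 'lh.'/'rh.'
# surface names but would also eat those characters elsewhere:
assert 'lh.sphere.reg'.lstrip('rh.').lstrip('lh.') == 'sphere.reg'
assert 'hippo.label'.lstrip('rh.').lstrip('lh.') == 'ippo.label'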
""" def _get_filecopy_info(self): self._normalize_filenames() diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py index 84559c90d9..2c7e706b19 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ b/nipype/interfaces/freesurfer/longitudinal.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index e2eba23196..ad0d4692cd 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os @@ -23,6 +24,7 @@ __docformat__ = 'restructuredtext' + class MRISPreprocInputSpec(FSTraitedSpec): out_file = File(argstr='--out %s', genfile=True, desc='output filename') @@ -122,8 +124,8 @@ class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec): 'surf_measure_file', 'surf_area'), desc='file necessary for surfmeas') surfreg_files = InputMultiPath(File(exists=True), argstr="--surfreg %s", - requires=['lh_surfreg_target', 'rh_surfreg_target'], - desc="lh and rh input surface registration files") + requires=['lh_surfreg_target', 'rh_surfreg_target'], + desc="lh and rh input surface registration files") lh_surfreg_target = File(desc="Implicit target surface registration file", requires=['surfreg_files']) rh_surfreg_target = File(desc="Implicit target surface registration file", @@ -183,10 +185,10 @@ def run(self, **inputs): def _format_arg(self, name, spec, value): # mris_preproc looks for these files in the surf dir - if name == 'surfreg_files': + if name == 'surfreg_files': basename = os.path.basename(value[0]) return spec.argstr % basename.lstrip('rh.').lstrip('lh.') - if name == "surf_measure_file": + if name == "surf_measure_file": basename = os.path.basename(value) return spec.argstr % basename.lstrip('rh.').lstrip('lh.') return super(MRISPreprocReconAll, self)._format_arg(name, spec, value) @@ -210,7 +212,7 @@ class GLMFitInputSpec(FSTraitedSpec): xor=('one_sample', 'fsgd', 'design', 'contrast'), desc='construct X and C as a one-sample group mean') no_contrast_ok = traits.Bool(argstr='--no-contrasts-ok', - desc='do not fail if no contrasts specified') + desc='do not fail if no contrasts specified') per_voxel_reg = InputMultiPath(File(exists=True), argstr='--pvr %s...', desc='per-voxel regressors') self_reg = traits.Tuple(traits.Int, traits.Int, traits.Int, @@ -1069,7 +1071,7 @@ def _gen_filename(self, name): class Label2LabelInputSpec(FSTraitedSpec): hemisphere = traits.Enum('lh', 'rh', argstr="--hemi %s", mandatory=True, - desc="Input hemisphere") + desc="Input hemisphere") subject_id = traits.String('subject_id', usedefault=True, argstr="--trgsubject %s", mandatory=True, desc="Target subject") @@ -1091,11 +1093,12 @@ class Label2LabelInputSpec(FSTraitedSpec): hash_files=False, keep_extension=True, desc="Target label") registration_method = traits.Enum('surface', 'volume', usedefault=True, - argstr="--regmethod %s", desc="Registration method") + argstr="--regmethod %s", desc="Registration method") copy_inputs = traits.Bool(desc="If running as a node, set this to True." 
+ "This will copy the input files to the node " + "directory.") + class Label2LabelOutputSpec(TraitedSpec): out_file = File(exists=True, desc='Output label') @@ -1311,7 +1314,6 @@ def _gen_filename(self, name): avg_directory = os.path.join(self.inputs.subjects_dir, avg_subject) if not os.path.isdir(avg_directory): fs_home = os.path.abspath(os.environ.get('FREESURFER_HOME')) - avg_home = os.path.join(fs_home, 'subjects', 'fsaverage') return avg_subject elif name == 'out_file': return self._list_outputs()[name] diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 1d209c3022..e395a6f705 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range import os @@ -1770,6 +1771,7 @@ class MNIBiasCorrectionInputSpec(FSTraitedSpec): shrink = traits.Int(argstr="--shrink %d", desc="Shrink parameter for finer sampling (default is 4)") + class MNIBiasCorrectionOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output volume") @@ -1929,6 +1931,7 @@ class CANormalizeInputSpec(FSTraitedSpec): long_file = File(argstr='-long %s', desc='undocumented flag used in longitudinal processing') + class CANormalizeOutputSpec(TraitedSpec): out_file = traits.File(exists=False, desc="The output file for Normalize") control_points = File( @@ -1964,7 +1967,7 @@ def _list_outputs(self): class CARegisterInputSpec(FSTraitedSpecOpenMP): - #required + # required in_file = File(argstr='%s', exists=True, mandatory=True, position=-3, desc="The input volume for CARegister") out_file = File(argstr='%s', position=-1, @@ -2031,7 +2034,7 @@ def _list_outputs(self): class CALabelInputSpec(FSTraitedSpecOpenMP): - #required + # required in_file = File(argstr="%s", position=-4, mandatory=True, exists=True, desc="Input volume for CALabel") out_file = File(argstr="%s", position=-1, mandatory=True, exists=False, @@ -2049,7 +2052,7 @@ class CALabelInputSpec(FSTraitedSpecOpenMP): argstr="-nobigventricles", desc="No big ventricles") align = traits.Bool(argstr="-align", desc="Align CALabel") prior = traits.Float(argstr="-prior %.1f", - desc="Prior for CALabel") + desc="Prior for CALabel") relabel_unlikely = traits.Tuple(traits.Int, traits.Float, argstr="-relabel_unlikely %d %.1f", desc=("Reclassify voxels at least some std" @@ -2356,6 +2359,7 @@ class EditWMwithAsegInputSpec(FSTraitedSpec): keep_in = traits.Bool(argstr="-keep-in", desc="Keep edits as found in input volume") + class EditWMwithAsegOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output edited WM file") diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index 60a10b4c11..13cf0e6b6a 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import os.path @@ -181,7 +182,7 @@ class EMRegisterInputSpec(FSTraitedSpecOpenMP): skull = traits.Bool( argstr="-skull", desc="align to atlas containing skull (uns=5)") mask = File(argstr="-mask %s", exists=True, - desc="use volume as a mask") + desc="use volume as a 
mask") nbrspacing = traits.Int(argstr="-uns %d", desc="align to atlas containing skull setting unknown_nbr_spacing = nbrspacing") transform = File(argstr="-t %s", exists=True, diff --git a/nipype/interfaces/freesurfer/tests/test_BBRegister.py b/nipype/interfaces/freesurfer/tests/test_BBRegister.py index 9725065fef..164772d15e 100644 --- a/nipype/interfaces/freesurfer/tests/test_BBRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_BBRegister.py @@ -13,7 +13,8 @@ def test_BBRegister_inputs(): ignore_exception=dict(nohash=True, usedefault=True,), init=dict(argstr='--init-%s', mandatory=True, xor=['init_reg_file'],), init_cost_file=dict(argstr='--initcost %s',), - init_reg_file=dict(argstr='--init-reg %s', mandatory=True, xor=['init'],), + init_reg_file=dict(argstr='--init-reg %s', mandatory=True, + xor=['init'],), intermediate_file=dict(argstr='--int %s',), out_fsl_file=dict(argstr='--fslmat %s',), out_lta_file=dict(argstr='--lta %s', min_ver='5.2.0',), diff --git a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py index 70701e5f57..dde43afa18 100644 --- a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py @@ -1,4 +1,3 @@ -# AUTO-GENERATED by tools/checkspecs.py on 2017.04.21 # Modified 2017.04.21 by Chris Markiewicz from __future__ import unicode_literals import pytest @@ -9,18 +8,14 @@ def test_FSSurfaceCommand_inputs(): - input_map = dict(args=dict(argstr='%s', - ), - environ=dict(nohash=True, - usedefault=True, - ), - ignore_exception=dict(nohash=True, - usedefault=True, - ), - subjects_dir=dict(), - terminal_output=dict(nohash=True, - ), - ) + input_map = dict(args=dict(argstr='%s'), + environ=dict(nohash=True, + usedefault=True), + ignore_exception=dict(nohash=True, + usedefault=True), + subjects_dir=dict(), + terminal_output=dict(nohash=True), + ) inputs = FSSurfaceCommand.input_spec() for key, metadata in list(input_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_model.py b/nipype/interfaces/freesurfer/tests/test_model.py index a30a29b0ac..d31837deec 100644 --- a/nipype/interfaces/freesurfer/tests/test_model.py +++ b/nipype/interfaces/freesurfer/tests/test_model.py @@ -31,7 +31,8 @@ def test_concatenate(tmpdir): # Test default behavior res = model.Concatenate(in_files=[in1, in2]).run() - assert res.outputs.concatenated_file == tmpdir.join('concat_output.nii.gz').strpath + assert res.outputs.concatenated_file == tmpdir.join( + 'concat_output.nii.gz').strpath assert np.allclose(nb.load('concat_output.nii.gz').get_data(), out_data) # Test specified concatenated_file @@ -48,7 +49,7 @@ def test_concatenate(tmpdir): wf.run() assert np.allclose(nb.load(tmpdir.join( 'test_concatenate', - 'concat', out).strpath).get_data(), + 'concat', out).strpath).get_data(), out_data) # Test a simple statistic diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index 4965329fae..786ce2b2c4 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -23,7 +23,8 @@ def test_robustregister(create_files_in_directory): assert reg.cmd == 'mri_robust_register' # test raising error with mandatory args absent - with pytest.raises(ValueError): reg.run() + with pytest.raises(ValueError): + reg.run() # .inputs based parameters setting reg.inputs.source_file = filelist[0] @@ -51,7 +52,8 @@ def 
test_fitmsparams(create_files_in_directory): assert fit.cmd == 'mri_ms_fitparms' # test raising error with mandatory args absent - with pytest.raises(ValueError): fit.run() + with pytest.raises(ValueError): + fit.run() # .inputs based parameters setting fit.inputs.in_files = filelist @@ -74,7 +76,8 @@ def test_synthesizeflash(create_files_in_directory): assert syn.cmd == 'mri_synthesize' # test raising error with mandatory args absent - with pytest.raises(ValueError): syn.run() + with pytest.raises(ValueError): + syn.run() # .inputs based parameters setting syn.inputs.t1_image = filelist[0] @@ -91,6 +94,7 @@ def test_synthesizeflash(create_files_in_directory): assert syn2.cmdline == ('mri_synthesize 25.00 20.00 5.000 %s %s %s' % (filelist[0], filelist[1], os.path.join(outdir, 'synth-flash_20.mgz'))) + @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_mandatory_outvol(create_files_in_directory): filelist, outdir = create_files_in_directory @@ -100,7 +104,8 @@ def test_mandatory_outvol(create_files_in_directory): assert mni.cmd == "mri_nu_correct.mni" # test raising error with mandatory args absent - with pytest.raises(ValueError): mni.cmdline + with pytest.raises(ValueError): + mni.cmdline # test with minimal args mni.inputs.in_file = filelist[0] @@ -122,7 +127,8 @@ def test_mandatory_outvol(create_files_in_directory): out_file='bias_corrected_output', iterations=4) assert mni2.cmdline == ('mri_nu_correct.mni --i %s --n 4 --o bias_corrected_output' - % filelist[0]) + % filelist[0]) + @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_bbregister(create_files_in_directory): @@ -158,6 +164,7 @@ def test_bbregister(create_files_in_directory): '--mov {full} --s fsaverage'.format( full=filelist[0], base=base)) + def test_FSVersion(): """Check that FSVersion is a string that can be compared with LooseVersion """ diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index c033a1e346..cb0b8c062e 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -1,9 +1,11 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open -import os, os.path as op +import os +import os.path as op import pytest from nipype.testing.fixtures import (create_files_in_directory_plus_dummy_file, create_surf_file_in_directory) @@ -22,7 +24,8 @@ def test_sample2surf(create_files_in_directory_plus_dummy_file): assert s2s.cmd == 'mri_vol2surf' # Test mandatory args exception - with pytest.raises(ValueError): s2s.run() + with pytest.raises(ValueError): + s2s.run() # Create testing files files, cwd = create_files_in_directory_plus_dummy_file @@ -52,7 +55,8 @@ def test_sample2surf(create_files_in_directory_plus_dummy_file): # Test that a 2-tuple range raises an error def set_illegal_range(): s2s.inputs.sampling_range = (.2, .5) - with pytest.raises(TraitError): set_illegal_range() + with pytest.raises(TraitError): + set_illegal_range() @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") @@ -64,7 +68,8 @@ def test_surfsmooth(create_surf_file_in_directory): assert smooth.cmd == "mri_surf2surf" # Test mandatory args exception - 
with pytest.raises(ValueError): smooth.run() + with pytest.raises(ValueError): + smooth.run() # Create testing files surf, cwd = create_surf_file_in_directory @@ -96,7 +101,8 @@ def test_surfxfm(create_surf_file_in_directory): assert xfm.cmd == "mri_surf2surf" # Test mandatory args exception - with pytest.raises(ValueError): xfm.run() + with pytest.raises(ValueError): + xfm.run() # Create testing files surf, cwd = create_surf_file_in_directory @@ -127,7 +133,8 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): assert fotos.cmd == "tksurfer" # Test mandatory args exception - with pytest.raises(ValueError): fotos.run() + with pytest.raises(ValueError): + fotos.run() # Create testing files files, cwd = create_files_in_directory_plus_dummy_file @@ -157,7 +164,8 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): try: hold_display = os.environ["DISPLAY"] del os.environ["DISPLAY"] - with pytest.raises(RuntimeError): fotos.run() + with pytest.raises(RuntimeError): + fotos.run() os.environ["DISPLAY"] = hold_display except KeyError: pass diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index a5568ebbcb..923bc85e78 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, open import os @@ -40,6 +41,7 @@ logger = logging.getLogger('interface') + def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): """Method to copy an input to the subjects directory""" # check that the input is defined @@ -49,18 +51,18 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): if isdefined(cls.inputs.subjects_dir): subjects_dir = cls.inputs.subjects_dir else: - subjects_dir = os.getcwd() #if not use cwd + subjects_dir = os.getcwd() # if not use cwd # check for subject_id if not subject_id: if isdefined(cls.inputs.subject_id): subject_id = cls.inputs.subject_id else: - subject_id = 'subject_id' #default + subject_id = 'subject_id' # default # check for basename - if basename == None: + if basename is None: basename = os.path.basename(in_file) # check which folder to put the file in - if folder != None: + if folder is not None: out_dir = os.path.join(subjects_dir, subject_id, folder) else: out_dir = os.path.join(subjects_dir, subject_id) @@ -72,6 +74,7 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): shutil.copy(in_file, out_file) return out_file + def createoutputdirs(outputs): """create all output directories. 
If not created, some freesurfer interfaces fail""" for output in list(outputs.values()): @@ -1361,7 +1364,7 @@ class Tkregister2InputSpec(FSTraitedSpec): desc='Invert input LTA before applying') # Output registration file options fsl_out = traits.Either(True, File, argstr='--fslregout %s', - desc='compute an FSL-compatible resgitration matrix') + desc='compute an FSL-compatible registration matrix') lta_out = traits.Either(True, File, argstr='--ltaout %s', desc='output registration file (LTA format)') invert_lta_out = traits.Bool(argstr='--ltaout-inv', requires=['lta_in'], @@ -1707,6 +1710,7 @@ class MRIFillOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file from MRIFill") log_file = File(desc="Output log file from MRIFill") + class MRIFill(FSCommand): """ This program creates hemispheric cutting planes and fills white matter @@ -1743,7 +1747,7 @@ class MRIsInflateInputSpec(FSTraitedSpec): hash_files=False, keep_extension=True, desc="Output file for MRIsInflate") # optional - out_sulc = File( exists=False, + out_sulc = File(exists=False, xor=['no_save_sulc'], desc="Output sulc file") no_save_sulc = traits.Bool(argstr='-no-save-sulc', @@ -1796,13 +1800,14 @@ class SphereInputSpec(FSTraitedSpecOpenMP): desc="Seed for setting random number generator") magic = traits.Bool(argstr="-q", desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu") - in_smoothwm = File( exists=True, copyfile=True, + in_smoothwm = File(exists=True, copyfile=True, desc="Input surface required when -q flag is not selected") class SphereOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file for Sphere") + class Sphere(FSCommandOpenMP): """ This program will add a template into an average surface @@ -2016,7 +2021,7 @@ class MakeSurfacesInputSpec(FSTraitedSpec): white_only = traits.Bool(argstr="-whiteonly", desc="Undocumented flage") in_aseg = File(argstr="-aseg %s", exists=True, - desc="Input segmentation file") + desc="Input segmentation file") in_T1 = File(argstr="-T1 %s", exists=True, desc="Input brain or T1 file") mgz = traits.Bool( @@ -2103,7 +2108,6 @@ def run(self, **inputs): 'label')) return super(MakeSurfaces, self).run(**inputs) - def _format_arg(self, name, spec, value): if name in ['in_T1', 'in_aseg']: # These inputs do not take full paths as inputs or even basenames @@ -2114,7 +2118,7 @@ def _format_arg(self, name, spec, value): else: prefix = basename if prefix == 'aseg': - return # aseg is already the default + return  # aseg is already the default return spec.argstr % prefix elif name in ['orig_white', 'orig_pial']: # these inputs do take full file paths or even basenames @@ -2324,6 +2328,7 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.curvfile2, 'surf') return super(CurvatureStats, self).run(**inputs) + class JacobianInputSpec(FSTraitedSpec): # required in_origsurf = File(argstr="%s", position=-3, mandatory=True, exists=True, @@ -2804,7 +2809,7 @@ class RelabelHypointensitiesInputSpec(FSTraitedSpec): aseg = File(argstr="%s", position=-3, mandatory=True, exists=True, desc="Input aseg file") surf_directory = traits.Directory('.', argstr="%s", position=-2, exists=True, - usedefault=True, + usedefault=True, desc="Directory containing lh.white and rh.white") out_file = File(argstr="%s", position=-1, exists=False, name_source=['aseg'], name_template='%s.hypos.mgz', diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index 9524a731d6..93b1f838f8 100644 --- a/nipype/interfaces/fsl/__init__.py +++ 
b/nipype/interfaces/fsl/__init__.py @@ -13,8 +13,8 @@ from .model import (Level1Design, FEAT, FEATModel, FILMGLS, FEATRegister, FLAMEO, ContrastMgr, MultipleRegressDesign, L2Model, SMM, MELODIC, SmoothEstimate, Cluster, Randomise, GLM) -from .utils import (AvScale, Smooth, Merge, ExtractROI, Split, ImageMaths, ImageMeants, - ImageStats, FilterRegressor, Overlay, Slicer, +from .utils import (AvScale, Smooth, Merge, ExtractROI, Split, ImageMaths, + ImageMeants, ImageStats, FilterRegressor, Overlay, Slicer, PlotTimeSeries, PlotMotionParams, ConvertXFM, SwapDimensions, PowerSpectrum, Reorient2Std, Complex, InvWarp, WarpUtils, ConvertWarp, WarpPoints, diff --git a/nipype/interfaces/fsl/aroma.py b/nipype/interfaces/fsl/aroma.py index fb8dc82bd8..dc67637270 100644 --- a/nipype/interfaces/fsl/aroma.py +++ b/nipype/interfaces/fsl/aroma.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, File, Directory, traits, isdefined) import os @@ -60,16 +61,18 @@ class ICA_AROMAInputSpec(CommandLineInputSpec): '-aggr: aggressive denoising, i.e. full component regression\n' '-both: both aggressive and non-aggressive denoising (two outputs)') + class ICA_AROMAOutputSpec(TraitedSpec): aggr_denoised_file = File(exists=True, desc='if generated: aggressively denoised volume') nonaggr_denoised_file = File(exists=True, - desc='if generated: non aggressively denoised volume' ) + desc='if generated: non-aggressively denoised volume') out_dir = Directory(exists=True, desc='directory contains (in addition to the denoised files): ' 'melodic.ica + classified_motion_components + ' 'classification_overview + feature_scores + melodic_ic_mni)') + class ICA_AROMA(CommandLine): """ Interface for the ICA_AROMA.py script. diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py index 2bb120e097..608c424d2a 100644 --- a/nipype/interfaces/fsl/base.py +++ b/nipype/interfaces/fsl/base.py @@ -25,14 +25,16 @@ See the docstrings of the individual classes for examples. """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from glob import glob import os from ... import logging from ...utils.filemanip import fname_presuffix -from ..base import traits, isdefined, CommandLine, CommandLineInputSpec, PackageInfo +from ..base import (traits, isdefined, CommandLine, CommandLineInputSpec, + PackageInfo) from ...external.due import BibTeX IFLOGGER = logging.getLogger('interface') @@ -149,17 +151,18 @@ class FSLCommand(CommandLine): input_spec = FSLCommandInputSpec _output_type = None - references_ = [{'entry': BibTeX('@article{JenkinsonBeckmannBehrensWoolrichSmith2012,' - 'author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, ' - 'M.W. Woolrich, and S.M. Smith},' - 'title={FSL},' - 'journal={NeuroImage},' - 'volume={62},' - 'pages={782-790},' - 'year={2012},' - '}'), - 'tags': ['implementation'], - }] + references_ = [{'entry': BibTeX( + '@article{JenkinsonBeckmannBehrensWoolrichSmith2012,' + 'author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, ' + 'M.W. Woolrich, and S.M. 
Smith},' + 'title={FSL},' + 'journal={NeuroImage},' + 'volume={62},' + 'pages={782-790},' + 'year={2012},' + '}'), + 'tags': ['implementation'], + }] def __init__(self, **inputs): super(FSLCommand, self).__init__(**inputs) diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index 812515fcda..9e5c6cb6d0 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -12,7 +12,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, open import os @@ -23,6 +24,7 @@ InputMultiPath, OutputMultiPath, traits) from .base import (FSLCommand, FSLCommandInputSpec, Info) + class DTIFitInputSpec(FSLCommandInputSpec): dwi = File(exists=True, desc='diffusion weighted image data file', argstr='-k %s', position=0, mandatory=True) @@ -579,10 +581,10 @@ class ProbTrackX(FSLCommand): >>> from nipype.interfaces import fsl >>> pbx = fsl.ProbTrackX(samples_base_name='merged', mask='mask.nii', \ seed='MASK_average_thal_right.nii', mode='seedmask', \ - xfm='trans.mat', n_samples=3, n_steps=10, force_dir=True, opd=True, os2t=True, \ - target_masks = ['targets_MASK1.nii', 'targets_MASK2.nii'], \ - thsamples='merged_thsamples.nii', fsamples='merged_fsamples.nii', phsamples='merged_phsamples.nii', \ - out_dir='.') + xfm='trans.mat', n_samples=3, n_steps=10, force_dir=True, opd=True, \ + os2t=True, target_masks = ['targets_MASK1.nii', 'targets_MASK2.nii'], \ + thsamples='merged_thsamples.nii', fsamples='merged_fsamples.nii', \ + phsamples='merged_phsamples.nii', out_dir='.') >>> pbx.cmdline 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd --os2t --dir=. 
--samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' @@ -650,7 +652,8 @@ def _list_outputs(self): outputs['log'] = os.path.abspath( os.path.join(out_dir, 'probtrackx.log')) - # utputs['way_total'] = os.path.abspath(os.path.join(out_dir, 'waytotal')) + # outputs['way_total'] = os.path.abspath(os.path.join(out_dir, + # 'waytotal')) if isdefined(self.inputs.opd is True): if (isinstance(self.inputs.seed, list) and isinstance(self.inputs.seed[0], list)): @@ -795,7 +798,8 @@ def _list_outputs(self): else: out_dir = self.inputs.out_dir - outputs['way_total'] = os.path.abspath(os.path.join(out_dir, 'waytotal')) + outputs['way_total'] = os.path.abspath( + os.path.join(out_dir, 'waytotal')) if isdefined(self.inputs.omatrix1): outputs['network_matrix'] = os.path.abspath( diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index 0beb60b3c0..289d9a1800 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -140,16 +140,17 @@ class TOPUPInputSpec(FSLCommandInputSpec): out_field = File(argstr='--fout=%s', hash_files=False, name_source=['in_file'], name_template='%s_field', desc='name of image file with field (Hz)') - out_warp_prefix = traits.Str("warpfield", argstr='--dfout=%s', hash_files=False, - desc='prefix for the warpfield images (in mm)', - usedefault=True) + out_warp_prefix = traits.Str( + "warpfield", argstr='--dfout=%s', hash_files=False, + desc='prefix for the warpfield images (in mm)', + usedefault=True) out_mat_prefix = traits.Str("xfm", argstr='--rbmout=%s', hash_files=False, desc='prefix for the realignment matrices', usedefault=True) out_jac_prefix = traits.Str("jac", argstr='--jacout=%s', - hash_files=False, - desc='prefix for the warpfield images', - usedefault=True) + hash_files=False, + desc='prefix for the warpfield images', + usedefault=True) out_corrected = File(argstr='--iout=%s', hash_files=False, name_source=['in_file'], name_template='%s_corrected', desc='name of 4D image file with unwarped images') @@ -473,8 +474,8 @@ class EddyInputSpec(FSLCommandInputSpec): 'movement')) dont_peas = traits.Bool(False, argstr='--dont_peas', - desc="Do NOT perform a post-eddy alignment of " - "shells") + desc="Do NOT perform a post-eddy alignment of " + "shells") fwhm = traits.Float(desc=('FWHM for conditioning filter when estimating ' 'the parameters'), argstr='--fwhm=%s') diff --git a/nipype/interfaces/fsl/fix.py b/nipype/interfaces/fsl/fix.py index cd4aacfedb..c42fa37b2c 100644 --- a/nipype/interfaces/fsl/fix.py +++ b/nipype/interfaces/fsl/fix.py @@ -12,7 +12,7 @@ will fail and comment on that. 
EXAMPLE: -subject_list = ['1','2','3'] +subject_list = ['1', '2', '3'] fix_pipeline = pe.Workflow(name='fix_pipeline') fix_pipeline.base_dir = os.path.abspath('./') @@ -54,7 +54,8 @@ outgraph = fix_pipeline.run() """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ..base import ( TraitedSpec, @@ -100,7 +101,7 @@ class TrainingSetCreator(BaseInterface): def _run_interface(self, runtime): mel_icas = [] for item in self.inputs.mel_icas_in: - if os.path.exists(os.path.join(item,'hand_labels_noise.txt')): + if os.path.exists(os.path.join(item, 'hand_labels_noise.txt')): mel_icas.append(item) if len(mel_icas) == 0: @@ -111,15 +112,13 @@ def _run_interface(self, runtime): def _list_outputs(self): mel_icas = [] for item in self.inputs.mel_icas_in: - if os.path.exists(os.path.join(item,'hand_labels_noise.txt')): + if os.path.exists(os.path.join(item, 'hand_labels_noise.txt')): mel_icas.append(item) outputs = self._outputs().get() outputs['mel_icas_out'] = mel_icas return outputs - - class FeatureExtractorInputSpec(CommandLineInputSpec): mel_ica = Directory(exists=True, copyfile=False, desc='Melodic output directory or directories', argstr='%s', position=-1) @@ -175,8 +174,6 @@ def _list_outputs(self): return outputs - - class AccuracyTesterInputSpec(CommandLineInputSpec): mel_icas = InputMultiPath(Directory(exists=True), copyfile=False, desc='Melodic output directories', @@ -209,7 +206,6 @@ def _list_outputs(self): return outputs - class ClassifierInputSpec(CommandLineInputSpec): mel_ica = Directory(exists=True, copyfile=False, desc='Melodic output directory or directories', argstr='%s', position=1) @@ -220,6 +216,7 @@ class ClassifierInputSpec(CommandLineInputSpec): artifacts_list_file = File(desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually') + class ClassifierOutputSpec(TraitedSpec): artifacts_list_file = File(desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually') @@ -248,8 +245,6 @@ def _list_outputs(self): return outputs - - class CleanerInputSpec(CommandLineInputSpec): artifacts_list_file = File(exists=True, argstr='%s', position=1, mandatory=True, desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually') @@ -297,4 +292,3 @@ def _list_outputs(self): outputs = self.output_spec().get() outputs['cleaned_functional_file'] = self._get_cleaned_functional_filename(self.inputs.artifacts_list_file) return outputs - diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py index f403c5c402..7bb5c86c5f 100644 --- a/nipype/interfaces/fsl/maths.py +++ b/nipype/interfaces/fsl/maths.py @@ -11,7 +11,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import numpy as np diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index f130c01679..5bfcd99112 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -11,7 +11,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, 
absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, open import os @@ -51,8 +52,9 @@ class Level1DesignInputSpec(BaseInterfaceInputSpec): mandatory=True, desc=("name of basis function and options e.g., " "{'dgamma': {'derivs': True}}"),) - orthogonalization = traits.Dict(traits.Int, traits.Dict(traits.Int, - traits.Either(traits.Bool,traits.Int)), + orthogonalization = traits.Dict( + traits.Int, traits.Dict(traits.Int, + traits.Either(traits.Bool, traits.Int)), desc=("which regressors to make orthogonal e.g., " "{1: {0:0,1:0,2:0}, 2: {0:1,1:1,2:0}} to make the second " "regressor in a 2-regressor model orthogonal to the first."), @@ -159,13 +161,13 @@ def _create_ev_files( basis_key = "hrf" elif basis_key == "gamma": try: - _ = ev_parameters['gammasigma'] + _ = ev_parameters['gammasigma'] except KeyError: - ev_parameters['gammasigma'] = 3 + ev_parameters['gammasigma'] = 3 try: - _ = ev_parameters['gammadelay'] + _ = ev_parameters['gammadelay'] except KeyError: - ev_parameters['gammadelay'] = 6 + ev_parameters['gammadelay'] = 6 ev_template = load_template('feat_ev_'+basis_key+'.tcl') ev_none = load_template('feat_ev_none.tcl') ev_ortho = load_template('feat_ev_ortho.tcl') @@ -200,9 +202,9 @@ def _create_ev_files( ev_parameters['ev_num'] = num_evs[0] ev_parameters['ev_name'] = name ev_parameters['tempfilt_yn'] = do_tempfilter - if not 'basisorth' in ev_parameters: + if 'basisorth' not in ev_parameters: ev_parameters['basisorth'] = 1 - if not 'basisfnum' in ev_parameters: + if 'basisfnum' not in ev_parameters: ev_parameters['basisfnum'] = 1 try: ev_parameters['fsldir'] = os.environ['FSLDIR'] @@ -700,7 +702,6 @@ class FILMGLS(FSLCommand): LooseVersion(Info.version()) > LooseVersion('5.0.4')): input_spec = FILMGLSInputSpec505 - def _get_pe_files(self, cwd): files = None if isdefined(self.inputs.design_file): @@ -766,7 +767,6 @@ def _list_outputs(self): varcopes = [] zstats = [] tstats = [] - neffs = [] for i in range(numtcons): copes.append(self._gen_fname( 'cope%d.nii' % (base_contrast + i), cwd=pth)) @@ -1100,7 +1100,6 @@ class ContrastMgr(FSLCommand): input_spec = ContrastMgrInputSpec output_spec = ContrastMgrOutputSpec - def _run_interface(self, runtime): # The returncode is meaningless in ContrastMgr. 
So check the output # in stderr and if it's set, then update the returncode @@ -1819,29 +1818,29 @@ def _format_arg(self, name, spec, value): class DualRegressionInputSpec(FSLCommandInputSpec): in_files = InputMultiPath(File(exists=True), argstr="%s", mandatory=True, - position=-1, sep=" ", - desc="List all subjects' preprocessed, standard-space 4D datasets",) + position=-1, sep=" ", + desc="List all subjects' preprocessed, standard-space 4D datasets",) group_IC_maps_4D = File(exists=True, argstr="%s", mandatory=True, position=1, - desc="4D image containing spatial IC maps (melodic_IC) from the " - "whole-group ICA analysis") + desc="4D image containing spatial IC maps (melodic_IC) from the " + "whole-group ICA analysis") des_norm = traits.Bool(True, argstr="%i", position=2, usedefault=True, - desc="Whether to variance-normalise the timecourses used as the " - "stage-2 regressors; True is default and recommended") + desc="Whether to variance-normalise the timecourses used as the " + "stage-2 regressors; True is default and recommended") one_sample_group_mean = traits.Bool(argstr="-1", position=3, - desc="perform 1-sample group-mean test instead of generic " - "permutation test") + desc="perform 1-sample group-mean test instead of generic " + "permutation test") design_file = File(exists=True, argstr="%s", position=3, - desc="Design matrix for final cross-subject modelling with " - "randomise") + desc="Design matrix for final cross-subject modelling with " + "randomise") con_file = File(exists=True, argstr="%s", position=4, - desc="Design contrasts for final cross-subject modelling with " - "randomise") + desc="Design contrasts for final cross-subject modelling with " + "randomise") n_perm = traits.Int(argstr="%i", mandatory=True, position=5, - desc="Number of permutations for randomise; set to 1 for just raw " - "tstat output, set to 0 to not run randomise at all.") + desc="Number of permutations for randomise; set to 1 for just raw " + "tstat output, set to 0 to not run randomise at all.") out_dir = Directory("output", argstr="%s", usedefault=True, position=6, - desc="This directory will be created to hold all output and logfiles", - genfile=True) + desc="This directory will be created to hold all output and logfiles", + genfile=True) class DualRegressionOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index 4ffeead842..551d8e6dd8 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -11,7 +11,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, open import os @@ -1023,7 +1024,7 @@ def write_config(self, configfile): try: fid = open(configfile, 'w+') except IOError: - print ('unable to create config_file %s' % (configfile)) + print('unable to create config_file %s' % (configfile)) for item in list(self.inputs.get().items()): fid.write('%s\n' % (item)) diff --git a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py index b7573f7454..daf5472bad 100644 --- a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py +++ b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py @@ -8,25 +8,26 @@ def test_level1design(tmpdir): old = tmpdir.chdir() l = Level1Design() 
runinfo = dict(cond=[{'name': 'test_condition', 'onset': [0, 10], - 'duration':[10, 10]}],regress=[]) + 'duration':[10, 10]}], regress=[]) runidx = 0 contrasts = Undefined do_tempfilter = False orthogonalization = {} - basic_ev_parameters = {'temporalderiv':False} + basic_ev_parameters = {'temporalderiv': False} convolution_variants = [ - ('custom', 7, {'temporalderiv':False, 'bfcustompath':'/some/path'}), + ('custom', 7, {'temporalderiv': False, 'bfcustompath': '/some/path'}), ('hrf', 3, basic_ev_parameters), ('dgamma', 3, basic_ev_parameters), ('gamma', 2, basic_ev_parameters), ('none', 0, basic_ev_parameters) ] for key, val, ev_parameters in convolution_variants: - output_num, output_txt = Level1Design._create_ev_files(l, os.getcwd(), - runinfo, runidx, - ev_parameters, - orthogonalization, - contrasts, - do_tempfilter, - key) + output_num, output_txt = Level1Design._create_ev_files( + l, os.getcwd(), + runinfo, runidx, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + key) assert "set fmri(convolve1) {0}".format(val) in output_txt diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py index 030306e929..61d2994eae 100644 --- a/nipype/interfaces/fsl/tests/test_base.py +++ b/nipype/interfaces/fsl/tests/test_base.py @@ -9,6 +9,7 @@ import pytest + @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fslversion(): ver = fsl.Info.version() @@ -60,13 +61,18 @@ def test_FSLCommand2(): @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.parametrize("args, desired_name", - [({}, {"file": 'foo.nii.gz'}), # just the filename - ({"suffix": '_brain'}, {"file": 'foo_brain.nii.gz'}), # filename with suffix - ({"suffix": '_brain', "cwd": '/data'}, - {"dir": '/data', "file": 'foo_brain.nii.gz'}), # filename with suffix and working directory - ({"suffix": '_brain.mat', "change_ext": False}, {"file": 'foo_brain.mat'}) # filename with suffix and no file extension change - ]) +@pytest.mark.parametrize( + "args, desired_name", + [({}, {"file": 'foo.nii.gz'}), # just the filename + # filename with suffix + ({"suffix": '_brain'}, {"file": 'foo_brain.nii.gz'}), + ({"suffix": '_brain', "cwd": '/data'}, + # filename with suffix and working directory + {"dir": '/data', "file": 'foo_brain.nii.gz'}), + # filename with suffix and no file extension change + ({"suffix": '_brain.mat', "change_ext": False}, + {"file": 'foo_brain.mat'}) + ]) def test_gen_fname(args, desired_name): # Test _gen_fname method of FSLCommand cmd = fsl.FSLCommand(command='junk', output_type='NIFTI_GZ') @@ -77,4 +83,3 @@ def test_gen_fname(args, desired_name): else: desired = os.path.join(pth, desired_name["file"]) assert fname == desired - diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index dab4d825ae..a07083ca1c 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -223,7 +223,7 @@ def test_Proj_thresh(): results = proj3.run(volumes=['inp1', 'inp3', 'inp2'], threshold=2) assert results.runtime.cmdline == 'proj_thresh inp1 inp3 inp2 2' assert results.runtime.returncode != 0 - assert isinstance(results.interface.inputs.volumes, list) == True + assert isinstance(results.interface.inputs.volumes, list) assert results.interface.inputs.threshold == 2 # test arguments for opt_map @@ -399,4 +399,3 @@ def test_distancemap(create_files_in_directory): mapper.inputs.local_max_file = "max.nii" assert mapper.cmdline == \ "distancemap --out=%s --in=a.nii 
--localmax=max.nii" % os.path.join(newdir, "a_dstmap.nii") - diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index 17958426af..58f14f8584 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -30,7 +30,8 @@ def test_eddy_correct2(create_files_in_directory): assert eddy.cmdline == 'eddy_correct %s foo_eddc.nii 100' % filelist[0] # .run based parameter setting - eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file='foo_ec.nii', ref_num=20) + eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file='foo_ec.nii', + ref_num=20) assert eddy2.cmdline == 'eddy_correct %s foo_ec.nii 20' % filelist[0] # test arguments for opt_map diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index 408a74712f..9c57d73b6c 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -146,7 +146,7 @@ def test_stdimage(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command - stder = fsl.StdImage(in_file="a.nii",out_file="b.nii") + stder = fsl.StdImage(in_file="a.nii", out_file="b.nii") # Test the underlying command assert stder.cmd == "fslmaths" @@ -156,8 +156,8 @@ def test_stdimage(create_files_in_directory_plus_output_type): # Test the other dimensions cmdline = "fslmaths a.nii -{}std b.nii" - for dim in ["X","Y","Z","T"]: - stder.inputs.dimension=dim + for dim in ["X", "Y", "Z", "T"]: + stder.inputs.dimension = dim assert stder.cmdline == cmdline.format(dim) # Test the auto naming @@ -438,6 +438,3 @@ def test_tempfilt(create_files_in_directory_plus_output_type): filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) assert filt.cmdline == \ "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format(os.path.join(testdir, "a_filt{}".format(out_ext))) - - - diff --git a/nipype/interfaces/fsl/tests/test_model.py b/nipype/interfaces/fsl/tests/test_model.py index b2e3f8571c..79824ef596 100644 --- a/nipype/interfaces/fsl/tests/test_model.py +++ b/nipype/interfaces/fsl/tests/test_model.py @@ -15,14 +15,16 @@ def test_MultipleRegressDesign(tmpdir): tmpdir.chdir() foo = fsl.MultipleRegressDesign() - foo.inputs.regressors = dict(voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], BMI=[1, -1, 2]) + foo.inputs.regressors = dict(voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], + BMI=[1, -1, 2]) con1 = ['voice_and_age', 'T', ['age', 'voice_stenght'], [0.5, 0.5]] con2 = ['just_BMI', 'T', ['BMI'], [1]] foo.inputs.contrasts = [con1, con2, ['con3', 'F', [con1, con2]]] res = foo.run() for ii in ["mat", "con", "fts", "grp"]: - assert getattr(res.outputs, "design_"+ii) == tmpdir.join('design.'+ii).strpath + assert getattr( + res.outputs, "design_"+ii) == tmpdir.join('design.'+ii).strpath design_mat_expected_content = """/NumWaves 3 /NumPoints 3 @@ -63,5 +65,3 @@ def test_MultipleRegressDesign(tmpdir): """ for ii in ["mat", "con", "fts", "grp"]: assert tmpdir.join('design.'+ii).read() == eval("design_"+ii+"_expected_content") - - diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 3d75d514a6..2ab596c88b 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -3,12 +3,13 @@ from builtins import str # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import open, open +from builtins import 
open import os from copy import deepcopy -import pytest, pdb +import pytest +import pdb from nipype.utils.filemanip import split_filename, filename_to_list from .. import preprocess as fsl from nipype.interfaces.fsl import Info @@ -101,7 +102,7 @@ def test_fast(setup_infile): fasted2 = fsl.FAST(in_files=[tmp_infile, tmp_infile], verbose=True) assert faster.cmd == 'fast' - assert faster.inputs.verbose == True + assert faster.inputs.verbose assert faster.inputs.manual_seg == Undefined assert faster.inputs != fasted.inputs assert fasted.cmdline == 'fast -v -S 1 %s' % (tmp_infile) @@ -238,7 +239,8 @@ def test_flirt(setup_flirt): axfm = deepcopy(flirter) axfm.inputs.apply_xfm = True # in_matrix_file or uses_qform must be defined - with pytest.raises(RuntimeError): axfm.cmdline + with pytest.raises(RuntimeError): + axfm.cmdline axfm2 = deepcopy(axfm) # test uses_qform axfm.inputs.uses_qform = True @@ -510,6 +512,7 @@ def test_fnirt(setup_flirt): assert fnirt._list_outputs()['out_intensitymap_file'] == [ intmap_image, intmap_txt] + @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_applywarp(setup_flirt): tmpdir, infile, reffile = setup_flirt @@ -555,11 +558,11 @@ def setup_fugue(tmpdir): @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @pytest.mark.parametrize("attr, out_file", [ - ({"save_unmasked_fmap":True, "fmap_in_file":"infile", "mask_file":"infile", "output_type":"NIFTI_GZ"}, + ({"save_unmasked_fmap": True, "fmap_in_file": "infile", "mask_file": "infile", "output_type": "NIFTI_GZ"}, 'fmap_out_file'), - ({"save_unmasked_shift":True, "fmap_in_file":"infile", "dwell_time":1.e-3, "mask_file":"infile", "output_type": "NIFTI_GZ"}, + ({"save_unmasked_shift": True, "fmap_in_file": "infile", "dwell_time": 1.e-3, "mask_file": "infile", "output_type": "NIFTI_GZ"}, "shift_out_file"), - ({"in_file":"infile", "mask_file":"infile", "shift_in_file":"infile", "output_type":"NIFTI_GZ"}, + ({"in_file": "infile", "mask_file": "infile", "shift_in_file": "infile", "output_type": "NIFTI_GZ"}, 'unwarped_file') ]) def test_fugue(setup_fugue, attr, out_file): @@ -568,11 +571,13 @@ def test_fugue(setup_fugue, attr, out_file): fugue = fsl.FUGUE() for key, value in attr.items(): - if value == "infile": setattr(fugue.inputs, key, infile) - else: setattr(fugue.inputs, key, value) + if value == "infile": + setattr(fugue.inputs, key, infile) + else: + setattr(fugue.inputs, key, value) res = fugue.run() - assert isdefined(getattr(res.outputs,out_file)) + assert isdefined(getattr(res.outputs, out_file)) trait_spec = fugue.inputs.trait(out_file) out_name = trait_spec.name_template % 'dumbfile' out_name += '.nii.gz' diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index 66b91cf96f..46a6f4a2f0 100644 --- a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -302,4 +302,3 @@ def test_swapdims(create_files_in_directory_plus_output_type): # Test that we can set an output name swap.inputs.out_file = "b.nii" assert swap.cmdline == "fslswapdim a.nii x y z b.nii" - diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index b28a4df425..385c4ef5ac 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -16,7 +16,8 @@ datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + 
absolute_import) from builtins import map, range import os diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 0793b955bd..9fb0dae0c7 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -18,7 +18,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object, zip, filter, range, open, str import glob @@ -60,6 +61,7 @@ iflogger = logging.getLogger('interface') + def copytree(src, dst, use_hardlink=False): """Recursively copy a directory tree using nipype.utils.filemanip.copyfile() @@ -190,8 +192,8 @@ class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): 'it with')) regexp_substitutions = \ InputMultiPath(traits.Tuple(Str, Str), - desc=('List of 2-tuples reflecting a pair of a '\ - 'Python regexp pattern and a replacement '\ + desc=('List of 2-tuples reflecting a pair of a ' + 'Python regexp pattern and a replacement ' 'string. Invoked after string `substitutions`')) _outputs = traits.Dict(Str, value={}, usedefault=True) @@ -199,12 +201,12 @@ class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): desc='remove dest directory when copying dirs') # AWS S3 data attributes - creds_path = Str(desc='Filepath to AWS credentials file for S3 bucket '\ - 'access; if not specified, the credentials will '\ - 'be taken from the AWS_ACCESS_KEY_ID and '\ - 'AWS_SECRET_ACCESS_KEY environment variables') - encrypt_bucket_keys = traits.Bool(desc='Flag indicating whether to use S3 '\ - 'server-side AES-256 encryption') + creds_path = Str(desc='Filepath to AWS credentials file for S3 bucket ' + 'access; if not specified, the credentials will ' + 'be taken from the AWS_ACCESS_KEY_ID and ' + 'AWS_SECRET_ACCESS_KEY environment variables') + encrypt_bucket_keys = traits.Bool(desc='Flag indicating whether to use S3 ' + 'server-side AES-256 encryption') # Set this if user wishes to override the bucket with their own bucket = traits.Any(desc='Boto3 S3 bucket for manual override of bucket') # Set this if user wishes to have local copy of files as well @@ -588,7 +590,7 @@ def _upload_to_s3(self, bucket, src, dst): for root, dirs, files in os.walk(src): src_files.extend([os.path.join(root, fil) for fil in files]) # Make the dst files have the dst folder as base dir - dst_files = [os.path.join(dst, src_f.split(src)[1]) \ + dst_files = [os.path.join(dst, src_f.split(src)[1]) for src_f in src_files] else: src_files = [src] @@ -622,7 +624,7 @@ def _upload_to_s3(self, bucket, src, dst): iflogger.info('Uploading %s to S3 bucket, %s, as %s...', src_f, bucket.name, dst_f) if self.inputs.encrypt_bucket_keys: - extra_args = {'ServerSideEncryption' : 'AES256'} + extra_args = {'ServerSideEncryption': 'AES256'} else: extra_args = {} bucket.upload_file(src_f, dst_k, ExtraArgs=extra_args, @@ -669,7 +671,7 @@ def _list_outputs(self): 's3_datasink_' + bucket_name) outdir = local_out_exception # Log local copying directory - iflogger.info('Access to S3 failed! Storing outputs locally at: '\ + iflogger.info('Access to S3 failed! Storing outputs locally at: ' '%s\nError: %s', outdir, exc) else: s3dir = '' @@ -768,11 +770,11 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): desc='Use anonymous connection to s3. 
If this is set to True, boto may print' + ' a urlopen error, but this does not prevent data from being downloaded.') region = Str('us-east-1', usedefault=True, - desc='Region of s3 bucket') + desc='Region of s3 bucket') bucket = Str(mandatory=True, - desc='Amazon S3 bucket where your data is stored') + desc='Amazon S3 bucket where your data is stored') bucket_path = Str('', usedefault=True, - desc='Location within your bucket for subject data.') + desc='Location within your bucket for subject data.') local_directory = Directory(exists=True, desc='Path to the local directory for subject data to be downloaded ' 'and accessed. Should be on HDFS for Spark jobs.') @@ -781,8 +783,8 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): sort_filelist = traits.Bool(mandatory=True, desc='Sort the filelist that matches the template') template = Str(mandatory=True, - desc='Layout used to get files. Relative to bucket_path if defined.' - 'Uses regex rather than glob style formatting.') + desc='Layout used to get files. Relative to bucket_path if defined.' + 'Uses regex rather than glob style formatting.') template_args = traits.Dict(key_trait=Str, value_trait=traits.List(traits.List), desc='Information to plug into template') @@ -947,13 +949,13 @@ def _list_outputs(self): # Outputs are currently stored as locations on S3. # We must convert to the local location specified # and download the files. - for key,val in outputs.items(): + for key, val in outputs.items(): # This will basically be either list-like or string-like: # if it's an instance of a list, we'll iterate through it. # If it isn't, it's string-like (string, unicode), we # convert that value directly. if isinstance(val, (list, tuple, set)): - for i,path in enumerate(val): + for i, path in enumerate(val): outputs[key][i] = self.s3tolocal(path, bkt) else: outputs[key] = self.s3tolocal(val, bkt) @@ -989,7 +991,7 @@ class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): sort_filelist = traits.Bool(mandatory=True, desc='Sort the filelist that matches the template') template = Str(mandatory=True, - desc='Layout used to get files. relative to base directory if defined') + desc='Layout used to get files. relative to base directory if defined') template_args = traits.Dict(key_trait=Str, value_trait=traits.List(traits.List), desc='Information to plug into template') @@ -1190,15 +1192,15 @@ class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): desc="When matching mutliple files, return them" " in sorted order.") raise_on_empty = traits.Bool(True, usedefault=True, - desc="Raise an exception if a template pattern " - "matches no files.") + desc="Raise an exception if a template pattern " + "matches no files.") force_lists = traits.Either(traits.Bool(), traits.List(Str()), default=False, usedefault=True, desc=("Whether to return outputs as a list even" - " when only one file matches the template. " - "Either a boolean that applies to all output " - "fields or a list of output field names to " - "coerce to a list")) + " when only one file matches the template. 
" + "Either a boolean that applies to all output " + "fields or a list of output field names to " + "coerce to a list")) class SelectFiles(IOBase): @@ -1342,9 +1344,8 @@ class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): Str(), mandatory=True,) match_regex = Str('(.+)', - usedefault=True, - desc=("Regular expression for matching " - "paths.")) + usedefault=True, + desc=("Regular expression for matching paths.")) ignore_regexes = traits.List(desc=("List of regular expressions, " "if any match the path it will be " "ignored.") @@ -1870,9 +1871,9 @@ class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): _outputs = traits.Dict(Str, value={}, usedefault=True) server = Str(mandatory=True, - requires=['user', 'pwd'], - xor=['config'] - ) + requires=['user', 'pwd'], + xor=['config'] + ) user = Str() pwd = traits.Password() @@ -2142,8 +2143,8 @@ def _list_outputs(self): class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): host = Str('localhost', mandatory=True, - requires=['username', 'password'], - xor=['config'], usedefault=True) + requires=['username', 'password'], + xor=['config'], usedefault=True) config = File(mandatory=True, xor=['host'], desc="MySQL Options File (same format as my.cnf)") database_name = Str( @@ -2207,11 +2208,11 @@ class SSHDataGrabberInputSpec(DataGrabberInputSpec): download_files = traits.Bool(True, usedefault=True, desc='If false it will return the file names without downloading them') base_directory = Str(mandatory=True, - desc='Path to the base directory consisting of subject data.') + desc='Path to the base directory consisting of subject data.') template_expression = traits.Enum(['fnmatch', 'regexp'], usedefault=True, desc='Use either fnmatch or regexp to express templates') ssh_log_to_file = Str('', usedefault=True, - desc='If set SSH commands will be logged to the given file') + desc='If set SSH commands will be logged to the given file') class SSHDataGrabber(DataGrabber): diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index 0d8aa29e16..c96647d4b9 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -2,7 +2,8 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ General matlab interface code """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import os @@ -25,7 +26,7 @@ def get_matlab_command(): resource_monitor=False, terminal_output='allatonce').run() matlab_path = res.runtime.stdout.strip() - except Exception as e: + except Exception: return None return matlab_cmd diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index 466190468a..8983d6e351 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -10,7 +10,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op from ..utils.filemanip import split_filename diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index d3235c987d..ecf943e811 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -2,7 +2,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The 
minc module provides classes for interfacing with the `MINC -`_ command line tools. This +`_ command line tools. This module was written to work with MINC version 2.2.00. Author: Carlo Hamalainen diff --git a/nipype/interfaces/minc/base.py b/nipype/interfaces/minc/base.py index e4b8592adf..70ba596a59 100644 --- a/nipype/interfaces/minc/base.py +++ b/nipype/interfaces/minc/base.py @@ -2,13 +2,14 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC -`_ command line tools. This -module was written to work with MINC version 2.2.00. +`_ command line tools. +This module was written to work with MINC version 2.2.00. Author: Carlo Hamalainen http://carlo-hamalainen.net """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object import os import os.path @@ -125,6 +126,8 @@ def aggregate_filename(files, new_suffix): path = os.getcwd() if common_prefix == '': - return os.path.abspath(os.path.join(path, os.path.splitext(files[0])[0] + '_' + new_suffix + '.mnc')) + return os.path.abspath(os.path.join( + path, os.path.splitext(files[0])[0] + '_' + new_suffix + '.mnc')) else: - return os.path.abspath(os.path.join(path, common_prefix + '_' + new_suffix + '.mnc')) + return os.path.abspath(os.path.join( + path, common_prefix + '_' + new_suffix + '.mnc')) diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index a6fc9f5902..1c0d08e353 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -16,7 +16,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import glob diff --git a/nipype/interfaces/minc/testdata.py b/nipype/interfaces/minc/testdata.py index c586bd6f99..8537e5b98b 100644 --- a/nipype/interfaces/minc/testdata.py +++ b/nipype/interfaces/minc/testdata.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from ...testing import example_data @@ -9,5 +10,6 @@ nlp_config = example_data('minc_nlp.conf') + def nonempty_minc_data(i, shape='2D'): return example_data('minc_test_%s_%.2d.mnc' % (shape, i,)) diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index d3a6b785fa..8a9e08fd9b 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,3 +1,12 @@ # -*- coding: utf-8 -*- -from __future__ import print_function, division, unicode_literals, absolute_import -from .developer import JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, JistLaminarProfileGeometry, JistLaminarProfileCalculator, MedicAlgorithmN3, JistLaminarROIAveraging, MedicAlgorithmLesionToads, JistBrainMp2rageSkullStripping, JistCortexSurfaceMeshInflation, RandomVol, MedicAlgorithmImageCalculator, JistBrainMp2rageDuraEstimation, JistLaminarProfileSampling, MedicAlgorithmMipavReorient, MedicAlgorithmSPECTRE2010, JistBrainPartialVolumeFilter, JistIntensityMp2rageMasking, MedicAlgorithmThresholdToBinaryMask +from __future__ import (print_function, division, unicode_literals, + absolute_import) +from 
.developer import ( + JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, + JistLaminarProfileGeometry, JistLaminarProfileCalculator, MedicAlgorithmN3, + JistLaminarROIAveraging, MedicAlgorithmLesionToads, + JistBrainMp2rageSkullStripping, JistCortexSurfaceMeshInflation, RandomVol, + MedicAlgorithmImageCalculator, JistBrainMp2rageDuraEstimation, + JistLaminarProfileSampling, MedicAlgorithmMipavReorient, + MedicAlgorithmSPECTRE2010, JistBrainPartialVolumeFilter, + JistIntensityMp2rageMasking, MedicAlgorithmThresholdToBinaryMask) diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py index 5196ddf5be..2b469e02b3 100644 --- a/nipype/interfaces/mne/base.py +++ b/nipype/interfaces/mne/base.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, bytes import os.path as op diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index eb34de974e..11110a1e9f 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -8,7 +8,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from io import open import os.path as op @@ -86,16 +87,13 @@ def read_mrtrix_streamlines(in_file, header, as_generator=True): bytesize = pt_cols * 4 def points_per_track(offset): - n_streams = 0 - n_points = 0 track_points = [] iflogger.info('Identifying the number of points per tract...') all_str = fileobj.read() num_triplets = int(len(all_str) / bytesize) pts = np.ndarray(shape=(num_triplets, pt_cols), dtype='f4', buffer=all_str) - nonfinite_list = np.where(np.isfinite(pts[:, 2]) == False) + nonfinite_list = np.where(np.invert(np.isfinite(pts[:, 2]))) nonfinite_list = list(nonfinite_list[0])[0:-1] # Converts numpy array to list, removes the last value - nonfinite_list_bytes = [offset + x * bytesize for x in nonfinite_list] for idx, value in enumerate(nonfinite_list): if idx == 0: track_points.append(nonfinite_list[idx]) diff --git a/nipype/interfaces/mrtrix/preprocess.py b/nipype/interfaces/mrtrix/preprocess.py index becee5088f..286c4c749e 100644 --- a/nipype/interfaces/mrtrix/preprocess.py +++ b/nipype/interfaces/mrtrix/preprocess.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 025d45b2e5..3ff373ac4e 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op import numpy as np @@ -236,9 +237,7 @@ def _gen_outfilename(self): def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z): bvecs = np.loadtxt(bvec_file) bvals = np.loadtxt(bval_file) - flip = False if np.shape(bvecs)[0] > np.shape(bvecs)[1]: - flip = True bvecs = np.transpose(bvecs) if invert_x: bvecs[0, :] = -bvecs[0, :] 
diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index c013d7b04a..baccd7e32e 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -9,7 +9,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import os.path as op diff --git a/nipype/interfaces/mrtrix3/base.py b/nipype/interfaces/mrtrix3/base.py index 0d91c3d56d..2d7628c2e2 100644 --- a/nipype/interfaces/mrtrix3/base.py +++ b/nipype/interfaces/mrtrix3/base.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ... import logging from ..base import (CommandLineInputSpec, CommandLine, traits, File, isdefined) diff --git a/nipype/interfaces/mrtrix3/connectivity.py b/nipype/interfaces/mrtrix3/connectivity.py index 3b79b16442..adbe82aaf6 100644 --- a/nipype/interfaces/mrtrix3/connectivity.py +++ b/nipype/interfaces/mrtrix3/connectivity.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import os.path as op diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index 0eedc3f449..4bb45399d6 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op diff --git a/nipype/interfaces/mrtrix3/reconst.py b/nipype/interfaces/mrtrix3/reconst.py index f7ea4f01e4..b42994b96c 100644 --- a/nipype/interfaces/mrtrix3/reconst.py +++ b/nipype/interfaces/mrtrix3/reconst.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index f2cc9c7c75..894de6d403 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op diff --git a/nipype/interfaces/mrtrix3/utils.py b/nipype/interfaces/mrtrix3/utils.py index 9f319456d6..822613507d 100644 --- a/nipype/interfaces/mrtrix3/utils.py +++ b/nipype/interfaces/mrtrix3/utils.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os.path as op @@ -20,7 +21,6 @@ from .base import MRTrix3BaseInputSpec, MRTrix3Base - class BrainMaskInputSpec(MRTrix3BaseInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, desc='input diffusion weighted images') diff --git a/nipype/interfaces/niftyseg/base.py 
b/nipype/interfaces/niftyseg/base.py index a84fb9eb62..fe24888812 100644 --- a/nipype/interfaces/niftyseg/base.py +++ b/nipype/interfaces/niftyseg/base.py @@ -16,7 +16,8 @@ -------- See the docstrings of the individual classes for examples. """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ..niftyfit.base import NiftyFitCommand diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index db47b57e8b..7154a77d3a 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -23,6 +23,7 @@ BaseInterfaceInputSpec, File, InputMultiPath) IFLOGGER = logging.getLogger('interface') + class SignalExtractionInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc='4-D fMRI nii file') label_files = InputMultiPath(File(exists=True), mandatory=True, @@ -52,12 +53,14 @@ class SignalExtractionInputSpec(BaseInterfaceInputSpec): '(instead of just regions).') detrend = traits.Bool(False, usedefault=True, desc='If True, perform detrending using nilearn.') + class SignalExtractionOutputSpec(TraitedSpec): out_file = File(exists=True, desc='tsv file containing the computed ' 'signals, with as many columns as there are labels and as ' 'many rows as there are timepoints in in_file, plus a ' 'header row with values from class_labels') + class SignalExtraction(BaseInterface): ''' Extracts signals over tissue classes or brain regions @@ -100,15 +103,15 @@ def _process_inputs(self): maskers = [] # determine form of label files, choose appropriate nilearn masker - if np.amax(label_data.get_data()) > 1: # 3d label file + if np.amax(label_data.get_data()) > 1: # 3d label file n_labels = np.amax(label_data.get_data()) maskers.append(nl.NiftiLabelsMasker(label_data)) - else: # 4d labels + else: # 4d labels n_labels = label_data.get_data().shape[3] - if self.inputs.incl_shared_variance: # 4d labels, independent computation + if self.inputs.incl_shared_variance: # 4d labels, independent computation for img in nli.iter_img(label_data): maskers.append(nl.NiftiMapsMasker(self._4d(img.get_data(), img.affine))) - else: # 4d labels, one computation fitting all + else: # 4d labels, one computation fitting all maskers.append(nl.NiftiMapsMasker(label_data)) # check label list size @@ -124,8 +127,8 @@ def _process_inputs(self): self.inputs.label_files)) if self.inputs.include_global: - global_label_data = label_data.get_data().sum(axis=3) # sum across all regions - global_label_data = np.rint(global_label_data).astype(int).clip(0, 1) # binarize + global_label_data = label_data.get_data().sum(axis=3) # sum across all regions + global_label_data = np.rint(global_label_data).astype(int).clip(0, 1) # binarize global_label_data = self._4d(global_label_data, label_data.affine) global_masker = nl.NiftiLabelsMasker(global_label_data, detrend=self.inputs.detrend) maskers.insert(0, global_masker) diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py index 58c07b5b95..fc091c0217 100644 --- a/nipype/interfaces/nipy/model.py +++ b/nipype/interfaces/nipy/model.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, str, bytes import os @@ -29,7 +30,6 @@ from nipy.modalities.fmri.experimental_paradigm import BlockParadigm - class 
FitGLMInputSpec(BaseInterfaceInputSpec): session_info = traits.List(minlen=1, maxlen=1, mandatory=True, desc=('Session specific information generated by' @@ -285,7 +285,6 @@ def _run_interface(self, runtime): self._z_maps = [] for contrast_def in self.inputs.contrasts: name = contrast_def[0] - _ = contrast_def[1] contrast = np.zeros(len(reg_names)) for i, reg_name in enumerate(reg_names): diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index c4b3ece2f1..161bd9d322 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import os @@ -35,7 +36,6 @@ nipy_version = nipy.__version__ - class ComputeMaskInputSpec(BaseInterfaceInputSpec): mean_volume = File(exists=True, mandatory=True, desc="mean EPI image, used to compute the threshold for the mask") diff --git a/nipype/interfaces/nipy/utils.py b/nipype/interfaces/nipy/utils.py index 0b1c6a2091..9f789c9f09 100644 --- a/nipype/interfaces/nipy/utils.py +++ b/nipype/interfaces/nipy/utils.py @@ -7,7 +7,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import warnings import nibabel as nb @@ -26,7 +27,6 @@ from nipy.algorithms.registration.affine import Affine - class SimilarityInputSpec(BaseInterfaceInputSpec): volume1 = File(exists=True, desc="3D volume", mandatory=True) volume2 = File(exists=True, desc="3D volume", mandatory=True) diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index ee34271044..8b15ba307f 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -10,7 +10,8 @@ - nitime.viz.drawmatrix_channels """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import zip, object, open import numpy as np diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index d37fea4f4f..23e29fe331 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -21,7 +21,8 @@ def test_read_csv(): CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 # bogus value just to pass traits test CA.inputs.in_file = example_data('fmri_timeseries_nolabels.csv') - with pytest.raises(ValueError): CA._read_csv() + with pytest.raises(ValueError): + CA._read_csv() CA.inputs.in_file = example_data('fmri_timeseries.csv') data, roi_names = CA._read_csv() @@ -72,6 +73,7 @@ def test_coherence_analysis(tmpdir): (C.frequencies < CA.inputs.frequency_range[1]))[0] # Extract the coherence and average across these frequency bands: - coh = np.mean(C.coherence[:, :, freq_idx], -1) # Averaging on the last dimension + # Averaging is done on the last dimension + coh = np.mean(C.coherence[:, :, freq_idx], -1) assert (o.outputs.coherence_array == coh).all() diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index dbb3a25d1a..4bcef20627 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -8,7 +8,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) 
>>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os @@ -41,22 +42,22 @@ class PETPVCInputSpec(CommandLineInputSpec): - in_file = File(desc="PET image file", exists=True, mandatory=True, argstr="-i %s") - out_file = File(desc="Output file", genfile=True, hash_files=False, argstr="-o %s") + in_file = File(desc="PET image file", exists=True, mandatory=True, argstr="-i %s") + out_file = File(desc="Output file", genfile=True, hash_files=False, argstr="-o %s") mask_file = File(desc="Mask image file", exists=True, mandatory=True, argstr="-m %s") - pvc = traits.Enum(pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s") - fwhm_x = traits.Float(desc="The full-width at half maximum in mm along x-axis", mandatory=True, argstr="-x %.4f") - fwhm_y = traits.Float(desc="The full-width at half maximum in mm along y-axis", mandatory=True, argstr="-y %.4f") - fwhm_z = traits.Float(desc="The full-width at half maximum in mm along z-axis", mandatory=True, argstr="-z %.4f") - debug = traits.Bool (desc="Prints debug information", usedefault=True, default_value=False, argstr="-d") - n_iter = traits.Int (desc="Number of iterations", default_value=10, argstr="-n %d") - n_deconv = traits.Int (desc="Number of deconvolution iterations", default_value=10, argstr="-k %d") - alpha = traits.Float(desc="Alpha value", default_value=1.5, argstr="-a %.4f") + pvc = traits.Enum(pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s") + fwhm_x = traits.Float(desc="The full-width at half maximum in mm along x-axis", mandatory=True, argstr="-x %.4f") + fwhm_y = traits.Float(desc="The full-width at half maximum in mm along y-axis", mandatory=True, argstr="-y %.4f") + fwhm_z = traits.Float(desc="The full-width at half maximum in mm along z-axis", mandatory=True, argstr="-z %.4f") + debug = traits.Bool(desc="Prints debug information", usedefault=True, default_value=False, argstr="-d") + n_iter = traits.Int(desc="Number of iterations", default_value=10, argstr="-n %d") + n_deconv = traits.Int(desc="Number of deconvolution iterations", default_value=10, argstr="-k %d") + alpha = traits.Float(desc="Alpha value", default_value=1.5, argstr="-a %.4f") stop_crit = traits.Float(desc="Stopping criterion", default_value=0.01, argstr="-a %.4f") class PETPVCOutputSpec(TraitedSpec): - out_file = File(desc = "Output file") + out_file = File(desc="Output file") class PETPVC(CommandLine): @@ -149,9 +150,9 @@ class PETPVC(CommandLine): references_ = [{'entry': BibTeX("@article{0031-9155-61-22-7975," "author={Benjamin A Thomas and Vesna Cuplov and Alexandre Bousse and " - "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," + "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," "title={PETPVC: a toolbox for performing partial volume correction " - "techniques in positron emission tomography}," + "techniques in positron emission tomography}," "journal={Physics in Medicine and Biology}," "volume={61}," "number={22}," diff --git a/nipype/interfaces/semtools/brains/__init__.py b/nipype/interfaces/semtools/brains/__init__.py index 20c87b7dda..ebfab84bf3 100644 --- a/nipype/interfaces/semtools/brains/__init__.py +++ b/nipype/interfaces/semtools/brains/__init__.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import from .segmentation import SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask 
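# --- Editor's note: illustrative sketch, not part of the patch above ---
# The nilearn.py and petpvc.py hunks above are pure pycodestyle fixes: two
# blank lines before top-level class definitions (E302), at least two
# spaces before an inline comment (E261), and no whitespace before a
# call's opening parenthesis (E211, e.g. ``traits.Bool (...)``). A compact
# before/after sketch with throwaway names:
n_labels = 4        # E261 requires two spaces before this '#'


class Example:      # E302 requires two blank lines above this line
    flag = bool(0)  # E211 would flag ``bool (0)``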
-from .utilities import HistogramMatchingFilter, GenerateEdgeMapImage, GeneratePurePlugMask +from .utilities import (HistogramMatchingFilter, GenerateEdgeMapImage, + GeneratePurePlugMask) from .classify import BRAINSPosteriorToContinuousClass diff --git a/nipype/interfaces/semtools/brains/utilities.py b/nipype/interfaces/semtools/brains/utilities.py index 577975e10a..9ab429e7a4 100644 --- a/nipype/interfaces/semtools/brains/utilities.py +++ b/nipype/interfaces/semtools/brains/utilities.py @@ -47,6 +47,7 @@ class HistogramMatchingFilter(SEMLikeCommandLine): _outputs_filenames = {'outputVolume': 'outputVolume.nii'} _redirect_x = False + class GenerateEdgeMapImageInputSpec(CommandLineInputSpec): inputMRVolumes = InputMultiPath(File(exists=True), desc="List of input structural MR volumes to create the maximum edgemap", argstr="--inputMRVolumes %s...") inputMask = File(desc="Input mask file name. If set, image histogram percentiles will be calculated within the mask", exists=True, argstr="--inputMask %s") @@ -63,6 +64,7 @@ class GenerateEdgeMapImageOutputSpec(TraitedSpec): outputEdgeMap = File(desc="(required) output file name", exists=True) outputMaximumGradientImage = File(desc="output gradient image file name", exists=True) + class GenerateEdgeMapImage(SEMLikeCommandLine): """title: GenerateEdgeMapImage @@ -83,6 +85,7 @@ class GenerateEdgeMapImage(SEMLikeCommandLine): _outputs_filenames = {'outputEdgeMap': 'outputEdgeMap', 'outputMaximumGradientImage': 'outputMaximumGradientImage'} _redirect_x = False + class GeneratePurePlugMaskInputSpec(CommandLineInputSpec): inputImageModalities = InputMultiPath(File(exists=True), desc="List of input image file names to create pure plugs mask", argstr="--inputImageModalities %s...") threshold = traits.Float(desc="threshold value to define class membership", argstr="--threshold %f") @@ -93,6 +96,7 @@ class GeneratePurePlugMaskInputSpec(CommandLineInputSpec): class GeneratePurePlugMaskOutputSpec(TraitedSpec): outputMaskFile = File(desc="(required) Output binary mask file name", exists=True) + class GeneratePurePlugMask(SEMLikeCommandLine): """title: GeneratePurePlugMask diff --git a/nipype/interfaces/semtools/diffusion/__init__.py b/nipype/interfaces/semtools/diffusion/__init__.py index 100af56cbd..215cfa41d7 100644 --- a/nipype/interfaces/semtools/diffusion/__init__.py +++ b/nipype/interfaces/semtools/diffusion/__init__.py @@ -2,5 +2,14 @@ from __future__ import absolute_import from .diffusion import dtiaverage, dtiestim, dtiprocess, DWIConvert from .tractography import * -from .gtract import gtractTransformToDisplacementField, gtractInvertBSplineTransform, gtractConcatDwi, gtractAverageBvalues, gtractCoregBvalues, gtractResampleAnisotropy, gtractResampleCodeImage, gtractCopyImageOrientation, gtractCreateGuideFiber, gtractAnisotropyMap, gtractClipAnisotropy, gtractResampleB0, gtractInvertRigidTransform, gtractImageConformity, compareTractInclusion, gtractFastMarchingTracking, gtractInvertDisplacementField, gtractCoRegAnatomy, gtractResampleDWIInPlace, gtractCostFastMarching, gtractFiberTracking, extractNrrdVectorIndex, gtractResampleFibers, gtractTensor +from .gtract import ( + gtractTransformToDisplacementField, gtractInvertBSplineTransform, + gtractConcatDwi, gtractAverageBvalues, gtractCoregBvalues, + gtractResampleAnisotropy, gtractResampleCodeImage, + gtractCopyImageOrientation, gtractCreateGuideFiber, gtractAnisotropyMap, + gtractClipAnisotropy, gtractResampleB0, gtractInvertRigidTransform, + gtractImageConformity, compareTractInclusion, 
gtractFastMarchingTracking, + gtractInvertDisplacementField, gtractCoRegAnatomy, + gtractResampleDWIInPlace, gtractCostFastMarching, gtractFiberTracking, + extractNrrdVectorIndex, gtractResampleFibers, gtractTensor) from .maxcurvature import maxcurvature diff --git a/nipype/interfaces/semtools/filtering/__init__.py b/nipype/interfaces/semtools/filtering/__init__.py index ac69328f10..1e69233303 100644 --- a/nipype/interfaces/semtools/filtering/__init__.py +++ b/nipype/interfaces/semtools/filtering/__init__.py @@ -1,4 +1,10 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import from .denoising import UnbiasedNonLocalMeans -from .featuredetection import GenerateSummedGradientImage, CannySegmentationLevelSetImageFilter, DilateImage, TextureFromNoiseImageFilter, FlippedDifference, ErodeImage, GenerateBrainClippedImage, NeighborhoodMedian, GenerateTestImage, NeighborhoodMean, HammerAttributeCreator, TextureMeasureFilter, DilateMask, DumpBinaryTrainingVectors, DistanceMaps, STAPLEAnalysis, GradientAnisotropicDiffusionImageFilter, CannyEdge +from .featuredetection import ( + GenerateSummedGradientImage, CannySegmentationLevelSetImageFilter, + DilateImage, TextureFromNoiseImageFilter, FlippedDifference, ErodeImage, + GenerateBrainClippedImage, NeighborhoodMedian, GenerateTestImage, + NeighborhoodMean, HammerAttributeCreator, TextureMeasureFilter, DilateMask, + DumpBinaryTrainingVectors, DistanceMaps, STAPLEAnalysis, + GradientAnisotropicDiffusionImageFilter, CannyEdge) diff --git a/nipype/interfaces/semtools/registration/__init__.py b/nipype/interfaces/semtools/registration/__init__.py index 3b9a8916d5..33bd60ad59 100644 --- a/nipype/interfaces/semtools/registration/__init__.py +++ b/nipype/interfaces/semtools/registration/__init__.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .specialized import VBRAINSDemonWarp, BRAINSDemonWarp, BRAINSTransformFromFiducials +from .specialized import (VBRAINSDemonWarp, BRAINSDemonWarp, + BRAINSTransformFromFiducials) from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit from .brainsresize import BRAINSResize diff --git a/nipype/interfaces/semtools/segmentation/__init__.py b/nipype/interfaces/semtools/segmentation/__init__.py index 9fbf33be3c..55f965246d 100644 --- a/nipype/interfaces/semtools/segmentation/__init__.py +++ b/nipype/interfaces/semtools/segmentation/__init__.py @@ -1,3 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .specialized import BRAINSCut, BRAINSROIAuto, BRAINSConstellationDetector, BRAINSCreateLabelMapFromProbabilityMaps, BinaryMaskEditorBasedOnLandmarks, BRAINSMultiSTAPLE, BRAINSABC, ESLR +from .specialized import (BRAINSCut, BRAINSROIAuto, + BRAINSConstellationDetector, + BRAINSCreateLabelMapFromProbabilityMaps, + BinaryMaskEditorBasedOnLandmarks, BRAINSMultiSTAPLE, + BRAINSABC, ESLR) diff --git a/nipype/interfaces/semtools/utilities/__init__.py b/nipype/interfaces/semtools/utilities/__init__.py index f647492fcc..8b18442d92 100644 --- a/nipype/interfaces/semtools/utilities/__init__.py +++ b/nipype/interfaces/semtools/utilities/__init__.py @@ -1,3 +1,12 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .brains import BRAINSConstellationModeler, landmarksConstellationWeights, BRAINSTrimForegroundInDirection, BRAINSLmkTransform, BRAINSMush, BRAINSTransformConvert, landmarksConstellationAligner, BRAINSEyeDetector, BRAINSLinearModelerEPCA, BRAINSInitializedControlPoints, CleanUpOverlapLabels, BRAINSClipInferior, 
GenerateLabelMapFromProbabilityMap, BRAINSAlignMSP, BRAINSLandmarkInitializer, insertMidACPCpoint, BRAINSSnapShotWriter, JointHistogram, ShuffleVectorsModule, ImageRegionPlotter +from .brains import (BRAINSConstellationModeler, landmarksConstellationWeights, + BRAINSTrimForegroundInDirection, BRAINSLmkTransform, + BRAINSMush, BRAINSTransformConvert, + landmarksConstellationAligner, BRAINSEyeDetector, + BRAINSLinearModelerEPCA, BRAINSInitializedControlPoints, + CleanUpOverlapLabels, BRAINSClipInferior, + GenerateLabelMapFromProbabilityMap, BRAINSAlignMSP, + BRAINSLandmarkInitializer, insertMidACPCpoint, + BRAINSSnapShotWriter, JointHistogram, + ShuffleVectorsModule, ImageRegionPlotter) diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py index c6b929c41e..c7ffc42259 100644 --- a/nipype/interfaces/slicer/__init__.py +++ b/nipype/interfaces/slicer/__init__.py @@ -4,7 +4,8 @@ from .segmentation import * from .filtering import * from .utilities import EMSegmentTransformToNewFormat -from .surface import MergeModels, ModelToLabelMap, GrayscaleModelMaker, ProbeVolumeWithModel, LabelMapSmoothing, ModelMaker +from .surface import (MergeModels, ModelToLabelMap, GrayscaleModelMaker, + ProbeVolumeWithModel, LabelMapSmoothing, ModelMaker) from .quantification import * from .legacy import * from .registration import * diff --git a/nipype/interfaces/slicer/diffusion/__init__.py b/nipype/interfaces/slicer/diffusion/__init__.py index 85544b6594..f4b45badeb 100644 --- a/nipype/interfaces/slicer/diffusion/__init__.py +++ b/nipype/interfaces/slicer/diffusion/__init__.py @@ -1,3 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .diffusion import ResampleDTIVolume, DWIRicianLMMSEFilter, TractographyLabelMapSeeding, DWIJointRicianLMMSEFilter, DiffusionWeightedVolumeMasking, DTIimport, DWIToDTIEstimation, DiffusionTensorScalarMeasurements, DTIexport +from .diffusion import (ResampleDTIVolume, DWIRicianLMMSEFilter, + TractographyLabelMapSeeding, DWIJointRicianLMMSEFilter, + DiffusionWeightedVolumeMasking, DTIimport, + DWIToDTIEstimation, DiffusionTensorScalarMeasurements, + DTIexport) diff --git a/nipype/interfaces/slicer/filtering/__init__.py b/nipype/interfaces/slicer/filtering/__init__.py index d4dbd9e220..af0d50da34 100644 --- a/nipype/interfaces/slicer/filtering/__init__.py +++ b/nipype/interfaces/slicer/filtering/__init__.py @@ -1,8 +1,13 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .morphology import GrayscaleGrindPeakImageFilter, GrayscaleFillHoleImageFilter -from .denoising import GradientAnisotropicDiffusion, CurvatureAnisotropicDiffusion, GaussianBlurImageFilter, MedianImageFilter -from .arithmetic import MultiplyScalarVolumes, MaskScalarVolume, SubtractScalarVolumes, AddScalarVolumes, CastScalarVolume +from .morphology import (GrayscaleGrindPeakImageFilter, + GrayscaleFillHoleImageFilter) +from .denoising import (GradientAnisotropicDiffusion, + CurvatureAnisotropicDiffusion, + GaussianBlurImageFilter, MedianImageFilter) +from .arithmetic import (MultiplyScalarVolumes, MaskScalarVolume, + SubtractScalarVolumes, AddScalarVolumes, + CastScalarVolume) from .extractskeleton import ExtractSkeleton from .histogrammatching import HistogramMatching from .thresholdscalarvolume import ThresholdScalarVolume diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index f0bc8274bb..b50eafa0bc 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ 
b/nipype/interfaces/slicer/generate_classes.py @@ -3,7 +3,8 @@ modules are selected from the hardcoded list below and generated code is placed in the cli_modules.py file (and imported in __init__.py). For this to work correctly you must have your CLI executabes in $PATH""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, bytes, open import xml.dom.minidom import subprocess @@ -37,7 +38,8 @@ def add_class_to_package(class_codes, class_names, module_name, package_dir): f_m.write("""# -*- coding: utf-8 -*- \"\"\"Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.\"\"\"\n\n""") - imports = """from __future__ import print_function, division, unicode_literals, absolute_import + imports = """from __future__ import (print_function, division, unicode_literals, + absolute_import) from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath) import os\n\n\n""" diff --git a/nipype/interfaces/slicer/legacy/__init__.py b/nipype/interfaces/slicer/legacy/__init__.py index ee9d79b38c..75c6b9d327 100644 --- a/nipype/interfaces/slicer/legacy/__init__.py +++ b/nipype/interfaces/slicer/legacy/__init__.py @@ -4,4 +4,7 @@ from .segmentation import OtsuThresholdSegmentation from .filtering import OtsuThresholdImageFilter, ResampleScalarVolume from .converters import BSplineToDeformationField -from .registration import BSplineDeformableRegistration, AffineRegistration, MultiResolutionAffineRegistration, RigidRegistration, LinearRegistration, ExpertAutomatedRegistration +from .registration import (BSplineDeformableRegistration, AffineRegistration, + MultiResolutionAffineRegistration, + RigidRegistration, LinearRegistration, + ExpertAutomatedRegistration) diff --git a/nipype/interfaces/slicer/registration/__init__.py b/nipype/interfaces/slicer/registration/__init__.py index f19c1faff7..375b9b5416 100644 --- a/nipype/interfaces/slicer/registration/__init__.py +++ b/nipype/interfaces/slicer/registration/__init__.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .specialized import ACPCTransform, FiducialRegistration, VBRAINSDemonWarp, BRAINSDemonWarp +from .specialized import (ACPCTransform, FiducialRegistration, + VBRAINSDemonWarp, BRAINSDemonWarp) from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit diff --git a/nipype/interfaces/slicer/segmentation/__init__.py b/nipype/interfaces/slicer/segmentation/__init__.py index d4ebe74d7b..d966f07e27 100644 --- a/nipype/interfaces/slicer/segmentation/__init__.py +++ b/nipype/interfaces/slicer/segmentation/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -from .specialized import RobustStatisticsSegmenter, EMSegmentCommandLine, BRAINSROIAuto +from .specialized import (RobustStatisticsSegmenter, EMSegmentCommandLine, + BRAINSROIAuto) from .simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py index 7882fa1280..7b1480e4b5 100644 --- a/nipype/interfaces/spm/base.py +++ b/nipype/interfaces/spm/base.py @@ -14,7 +14,8 @@ spm.SPMCommand().version """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, 
unicode_literals, + absolute_import) from builtins import range, object, str, bytes # Standard library imports @@ -270,16 +271,17 @@ class SPMCommand(BaseInterface): _paths = None _use_mcr = None - references_ = [{'entry': BibTeX("@book{FrackowiakFristonFrithDolanMazziotta1997," - "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. Mazziotta}," - "title={Human Brain Function}," - "publisher={Academic Press USA}," - "year={1997}," - "}"), - 'description': 'The fundamental text on Statistical Parametric Mapping (SPM)', - # 'path': "nipype.interfaces.spm", - 'tags': ['implementation'], - }] + references_ = [{'entry': BibTeX( + "@book{FrackowiakFristonFrithDolanMazziotta1997," + "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. Mazziotta}," + "title={Human Brain Function}," + "publisher={Academic Press USA}," + "year={1997}," + "}"), + 'description': 'The fundamental text on Statistical Parametric Mapping (SPM)', + # 'path': "nipype.interfaces.spm", + 'tags': ['implementation'], + }] def __init__(self, **inputs): super(SPMCommand, self).__init__(**inputs) @@ -472,8 +474,8 @@ def _generate_job(self, prefix='', contents=None): elif isinstance(val, list): items_format = [] for el in val: - items_format += ['{}' if not isinstance(el, (str, bytes)) - else '\'{}\''] + items_format += ['{}' if not isinstance( + el, (str, bytes)) else '\'{}\''] val_format = ', '.join(items_format).format jobstring += '[{}];...\n'.format(val_format(*val)) elif isinstance(val, (str, bytes)): @@ -570,6 +572,7 @@ def _make_matlab_command(self, contents, postscript=None): mscript += postscript return mscript + class ImageFileSPM(ImageFile): """ Defines an ImageFile trait specific to SPM interfaces. diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 8ddc06a9b0..f31c630b08 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -11,7 +11,8 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import str, bytes # Standard library imports @@ -179,38 +180,38 @@ def _list_outputs(self): class EstimateModelInputSpec(SPMCommandInputSpec): spm_mat_file = File(exists=True, field='spmmat', - copyfile=True, mandatory=True, - desc='Absolute path to SPM.mat') + copyfile=True, mandatory=True, + desc='Absolute path to SPM.mat') estimation_method = traits.Dict( traits.Enum('Classical', 'Bayesian2', 'Bayesian'), field='method', mandatory=True, desc=('Dictionary of either Classical: 1, Bayesian: 1, ' 'or Bayesian2: 1 (dict)')) write_residuals = traits.Bool(field='write_residuals', - desc="Write individual residual images") + desc="Write individual residual images") flags = traits.Dict(desc='Additional arguments') class EstimateModelOutputSpec(TraitedSpec): mask_image = ImageFileSPM(exists=True, - desc='binary mask to constrain estimation') + desc='binary mask to constrain estimation') beta_images = OutputMultiPath(ImageFileSPM(exists=True), - desc='design parameter estimates') + desc='design parameter estimates') residual_image = ImageFileSPM(exists=True, - desc='Mean-squared image of the residuals') + desc='Mean-squared image of the residuals') residual_images = OutputMultiPath(ImageFileSPM(exists=True), - desc="individual residual images (requires `write_residuals`") + desc="individual residual images (requires `write_residuals`") RPVimage = ImageFileSPM(exists=True, 
desc='Resels per voxel image') spm_mat_file = File(exists=True, desc='Updated SPM mat file') labels = ImageFileSPM(exists=True, desc="label file") SDerror = OutputMultiPath(ImageFileSPM(exists=True), - desc="Images of the standard deviation of the error") + desc="Images of the standard deviation of the error") ARcoef = OutputMultiPath(ImageFileSPM(exists=True), - desc="Images of the AR coefficient") + desc="Images of the AR coefficient") Cbetas = OutputMultiPath(ImageFileSPM(exists=True), - desc="Images of the parameter posteriors") + desc="Images of the parameter posteriors") SDbetas = OutputMultiPath(ImageFileSPM(exists=True), - desc="Images of the standard deviation of parameter posteriors") + desc="Images of the standard deviation of parameter posteriors") class EstimateModel(SPMCommand): @@ -259,30 +260,29 @@ def _list_outputs(self): betas = [vbeta.fname[0] for vbeta in spm['SPM'][0, 0].Vbeta[0]] if ('Bayesian' in self.inputs.estimation_method.keys() or - 'Bayesian2' in self.inputs.estimation_method.keys()): + 'Bayesian2' in self.inputs.estimation_method.keys()): outputs['labels'] = os.path.join(pth, - 'labels.{}'.format(outtype)) + 'labels.{}'.format(outtype)) outputs['SDerror'] = glob(os.path.join(pth, 'Sess*_SDerror*')) outputs['ARcoef'] = glob(os.path.join(pth, 'Sess*_AR_*')) if betas: outputs['Cbetas'] = [os.path.join(pth, 'C{}'.format(beta)) - for beta in betas] + for beta in betas] outputs['SDbetas'] = [os.path.join(pth, 'SD{}'.format(beta)) - for beta in betas] + for beta in betas] if 'Classical' in self.inputs.estimation_method.keys(): outputs['residual_image'] = os.path.join(pth, - 'ResMS.{}'.format(outtype)) - outputs['RPVimage'] = os.path.join(pth, - 'RPV.{}'.format(outtype)) + 'ResMS.{}'.format(outtype)) + outputs['RPVimage'] = os.path.join(pth, + 'RPV.{}'.format(outtype)) if self.inputs.write_residuals: outputs['residual_images'] = glob(os.path.join(pth, 'Res_*')) if betas: outputs['beta_images'] = [os.path.join(pth, beta) - for beta in betas] + for beta in betas] - outputs['mask_image'] = os.path.join(pth, - 'mask.{}'.format(outtype)) + outputs['mask_image'] = os.path.join(pth, 'mask.{}'.format(outtype)) outputs['spm_mat_file'] = os.path.join(pth, 'SPM.mat') return outputs diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 0eebf3c6b8..93349fa279 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -9,7 +9,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range @@ -30,12 +31,11 @@ __docformat__ = 'restructuredtext' - class SliceTimingInputSpec(SPMCommandInputSpec): in_files = InputMultiPath(traits.Either(traits.List(ImageFileSPM( exists=True)), ImageFileSPM(exists=True)), - field='scans', + field='scans', desc='list of filenames to apply slice timing', mandatory=True, copyfile=False) num_slices = traits.Int(field='nslices', @@ -259,7 +259,7 @@ def _list_outputs(self): if resliced_all: outputs['realigned_files'] = [] for idx, imgf in enumerate( - filename_to_list(self.inputs.in_files)): + filename_to_list(self.inputs.in_files)): realigned_run = [] if isinstance(imgf, list): for i, inner_imgf in enumerate(filename_to_list(imgf)): @@ -275,8 +275,8 @@ def _list_outputs(self): class CoregisterInputSpec(SPMCommandInputSpec): target = 
ImageFileSPM(exists=True, mandatory=True, - field='ref', desc='reference file to register to', - copyfile=False) + field='ref', desc='reference file to register to', + copyfile=False) source = InputMultiPath(ImageFileSPM(exists=True), field='source', desc='file to register to target', copyfile=True, mandatory=True) @@ -561,10 +561,10 @@ def _list_outputs(self): class Normalize12InputSpec(SPMCommandInputSpec): image_to_align = ImageFileSPM(exists=True, field='subj.vol', - desc=('file to estimate normalization parameters ' - 'with'), - xor=['deformation_file'], - mandatory=True, copyfile=True) + desc=('file to estimate normalization parameters ' + 'with'), + xor=['deformation_file'], + mandatory=True, copyfile=True) apply_to_files = InputMultiPath( traits.Either(ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True))), @@ -572,10 +572,10 @@ class Normalize12InputSpec(SPMCommandInputSpec): desc='files to apply transformation to', copyfile=True) deformation_file = ImageFileSPM(field='subj.def', mandatory=True, - xor=['image_to_align', 'tpm'], copyfile=False, - desc=('file y_*.nii containing 3 deformation ' - 'fields for the deformation in x, y and z ' - 'dimension')) + xor=['image_to_align', 'tpm'], copyfile=False, + desc=('file y_*.nii containing 3 deformation ' + 'fields for the deformation in x, y and z ' + 'dimension')) jobtype = traits.Enum('estwrite', 'est', 'write', usedefault=True, desc='Estimate, Write or do Both') bias_regularization = traits.Enum(0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, @@ -906,7 +906,7 @@ class NewSegmentInputSpec(SPMCommandInputSpec): - which maps to save (Corrected, Field) - a tuple of two boolean values""", field='channel') tissues = traits.List( - traits.Tuple(traits.Tuple(ImageFileSPM(exists=True),traits.Int()), + traits.Tuple(traits.Tuple(ImageFileSPM(exists=True), traits.Int()), traits.Int(), traits.Tuple(traits.Bool, traits.Bool), traits.Tuple(traits.Bool, traits.Bool)), desc="""A list of tuples (one per tissue) with the following fields: @@ -1277,7 +1277,7 @@ def _list_outputs(self): class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): template_file = ImageFileSPM(exists=True, copyfile=False, mandatory=True, - desc="DARTEL template", field='mni_norm.template') + desc="DARTEL template", field='mni_norm.template') flowfield_files = InputMultiPath(ImageFileSPM(exists=True), mandatory=True, desc="DARTEL flow fields u_rc1*", field='mni_norm.data.subjs.flowfields') @@ -1443,7 +1443,7 @@ class ApplyDeformationFieldInputSpec(SPMCommandInputSpec): mandatory=True, field='fnames') deformation_field = File(exists=True, mandatory=True, field='comp{1}.def') reference_volume = ImageFileSPM(exists=True, mandatory=True, - field='comp{2}.id.space') + field='comp{2}.id.space') interp = traits.Range(low=0, high=7, field='interp', desc='degree of b-spline used for interpolation') diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index 57d0d88c21..9f31130f04 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -56,17 +56,17 @@ class TestClass(spm.SPMCommand): except KeyError: pass dc = TestClass() - assert dc._use_mcr == None - assert dc._matlab_cmd == None + assert dc._use_mcr is None + assert dc._matlab_cmd is None # test with only FORCE_SPMMCR set os.environ['FORCE_SPMMCR'] = '1' dc = TestClass() - assert dc._use_mcr == True - assert dc._matlab_cmd == None + assert dc._use_mcr + assert dc._matlab_cmd is None # test with both, FORCE_SPMMCR and SPMMCRCMD set 
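# --- Editor's note: illustrative sketch, not part of the patch above ---
# The spm/tests/test_base.py hunk swaps equality comparisons for identity
# and truthiness checks: ``x == None`` is E711 (use ``x is None``) and
# ``x == True`` is E712 (just assert ``x``). The neighbouring test files
# also split one-liners like ``with pytest.raises(...): ...`` onto two
# lines, fixing E701. A minimal demonstration with a stand-in object:
class Stub:
    _use_mcr = None
    _matlab_cmd = None


stub = Stub()
assert stub._use_mcr is None   # was: stub._use_mcr == None  (E711)
stub._use_mcr = True
assert stub._use_mcr           # was: stub._use_mcr == True  (E712)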
os.environ['SPMMCRCMD'] = 'spmcmd' dc = TestClass() - assert dc._use_mcr == True + assert dc._use_mcr assert dc._matlab_cmd == 'spmcmd' # restore environment os.environ.clear() diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py index 7d8106f80c..a574fb90a7 100644 --- a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -18,7 +18,7 @@ def test_coreg(): coreg.inputs.target = target assert coreg.inputs.matlab_cmd == 'mymatlab' coreg.inputs.moving = moving - assert isdefined(coreg.inputs.mat) == False + assert not isdefined(coreg.inputs.mat) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) mat = os.path.join(pth, '%s_to_%s.mat' % (mov, tgt)) @@ -50,8 +50,10 @@ def test_reslice(): reslice.inputs.in_file = moving reslice.inputs.space_defining = space_defining assert reslice.inputs.interp == 0 - with pytest.raises(TraitError): reslice.inputs.trait_set(interp='nearest') - with pytest.raises(TraitError): reslice.inputs.trait_set(interp=10) + with pytest.raises(TraitError): + reslice.inputs.trait_set(interp='nearest') + with pytest.raises(TraitError): + reslice.inputs.trait_set(interp=10) reslice.inputs.interp = 1 script = reslice._make_matlab_command(None) outfile = fname_presuffix(moving, prefix='r') @@ -70,9 +72,12 @@ def test_dicom_import(): assert di.inputs.output_dir_struct == 'flat' assert di.inputs.output_dir == './converted_dicom' assert di.inputs.format == 'nii' - assert di.inputs.icedims == False - with pytest.raises(TraitError): di.inputs.trait_set(output_dir_struct='wrong') - with pytest.raises(TraitError): di.inputs.trait_set(format='FAT') - with pytest.raises(TraitError): di.inputs.trait_set(in_files=['does_sfd_not_32fn_exist.dcm']) + assert not di.inputs.icedims + with pytest.raises(TraitError): + di.inputs.trait_set(output_dir_struct='wrong') + with pytest.raises(TraitError): + di.inputs.trait_set(format='FAT') + with pytest.raises(TraitError): + di.inputs.trait_set(in_files=['does_sfd_not_32fn_exist.dcm']) di.inputs.in_files = [dicom] assert di.inputs.in_files == [dicom] diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index 5c91e1313b..e79be0d514 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -1,14 +1,18 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os import numpy as np -from ...utils.filemanip import split_filename, fname_presuffix, filename_to_list, list_to_filename -from ..base import TraitedSpec, isdefined, File, traits, OutputMultiPath, InputMultiPath -from .base import SPMCommandInputSpec, SPMCommand, scans_for_fnames, scans_for_fname +from ...utils.filemanip import (split_filename, fname_presuffix, + filename_to_list, list_to_filename) +from ..base import (TraitedSpec, isdefined, File, traits, OutputMultiPath, + InputMultiPath) +from .base import (SPMCommandInputSpec, SPMCommand, scans_for_fnames, + scans_for_fname) class Analyze2niiInputSpec(SPMCommandInputSpec): diff --git a/nipype/interfaces/tests/test_bids.py b/nipype/interfaces/tests/test_bids.py index aa5bc6c359..2ba09f5acf 100644 --- a/nipype/interfaces/tests/test_bids.py +++ b/nipype/interfaces/tests/test_bids.py @@ -42,7 +42,7 @@ def test_bids_grabber(tmpdir): 
reason="Pybids is not installed in editable mode") def test_bids_fields(tmpdir): tmpdir.chdir() - bg = BIDSDataGrabber(infields = ['subject'], outfields = ['dwi']) + bg = BIDSDataGrabber(infields=['subject'], outfields=['dwi']) bg.inputs.base_dir = os.path.join(datadir, 'ds005') bg.inputs.subject = '01' bg.inputs.output_query['dwi'] = dict(modality='dwi') diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index ff56c9ec9d..552e0eb28a 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -60,28 +60,29 @@ def test_s3datagrabber(): templates1 = {"model": "interfaces/{package}/model.py", - "preprocess": "interfaces/{package}/pre*.py"} + "preprocess": "interfaces/{package}/pre*.py"} templates2 = {"converter": "interfaces/dcm{to!s}nii.py"} templates3 = {"model": "interfaces/{package.name}/model.py"} + @pytest.mark.parametrize("SF_args, inputs_att, expected", [ - ({"templates":templates1}, {"package":"fsl"}, - {"infields":["package"], "outfields":["model", "preprocess"], "run_output":{"model":op.join(op.dirname(nipype.__file__),"interfaces/fsl/model.py"), "preprocess":op.join(op.dirname(nipype.__file__),"interfaces/fsl/preprocess.py")}, "node_output":["model", "preprocess"]}), + ({"templates": templates1}, {"package": "fsl"}, + {"infields": ["package"], "outfields":["model", "preprocess"], "run_output":{"model": op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py"), "preprocess": op.join(op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py")}, "node_output": ["model", "preprocess"]}), - ({"templates":templates1, "force_lists":True}, {"package":"spm"}, - {"infields":["package"], "outfields":["model", "preprocess"], "run_output":{"model":[op.join(op.dirname(nipype.__file__),"interfaces/spm/model.py")], "preprocess":[op.join(op.dirname(nipype.__file__),"interfaces/spm/preprocess.py")]}, "node_output":["model", "preprocess"]}), + ({"templates": templates1, "force_lists": True}, {"package": "spm"}, + {"infields": ["package"], "outfields":["model", "preprocess"], "run_output":{"model": [op.join(op.dirname(nipype.__file__), "interfaces/spm/model.py")], "preprocess":[op.join(op.dirname(nipype.__file__), "interfaces/spm/preprocess.py")]}, "node_output":["model", "preprocess"]}), - ({"templates":templates1}, {"package":"fsl", "force_lists":["model"]}, - {"infields":["package"], "outfields":["model", "preprocess"], "run_output":{"model":[op.join(op.dirname(nipype.__file__),"interfaces/fsl/model.py")], "preprocess":op.join(op.dirname(nipype.__file__),"interfaces/fsl/preprocess.py")}, "node_output":["model", "preprocess"]}), + ({"templates": templates1}, {"package": "fsl", "force_lists": ["model"]}, + {"infields": ["package"], "outfields":["model", "preprocess"], "run_output":{"model": [op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py")], "preprocess":op.join(op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py")}, "node_output":["model", "preprocess"]}), - ({"templates":templates2}, {"to":2}, - {"infields":["to"], "outfields":["converter"], "run_output":{"converter":op.join(op.dirname(nipype.__file__), "interfaces/dcm2nii.py")}, "node_output":["converter"]}), + ({"templates": templates2}, {"to": 2}, + {"infields": ["to"], "outfields":["converter"], "run_output":{"converter": op.join(op.dirname(nipype.__file__), "interfaces/dcm2nii.py")}, "node_output": ["converter"]}), - ({"templates": templates3}, {"package": namedtuple("package", ["name"])("fsl")}, - {"infields": ["package"], "outfields": ["model"], - 
"run_output": {"model": op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py")}, - "node_output": ["model"]}), - ]) + ({"templates": templates3}, {"package": namedtuple("package", ["name"])("fsl")}, + {"infields": ["package"], "outfields": ["model"], + "run_output": {"model": op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py")}, + "node_output": ["model"]}), +]) def test_selectfiles(SF_args, inputs_att, expected): base_dir = op.dirname(nipype.__file__) dg = nio.SelectFiles(base_directory=base_dir, **SF_args) @@ -155,11 +156,11 @@ def test_datagrabber_order(tmpdir): res = dg.run() outfiles = res.outputs.outfiles - assert 'sub002_L1_R1' in outfiles[0][0] - assert 'sub002_L1_R2' in outfiles[0][1] - assert 'sub002_L2_R1' in outfiles[1][0] - assert 'sub002_L2_R2' in outfiles[1][1] - assert 'sub002_L3_R2' in outfiles[2][0] + assert 'sub002_L1_R1' in outfiles[0][0] + assert 'sub002_L1_R2' in outfiles[0][1] + assert 'sub002_L2_R1' in outfiles[1][0] + assert 'sub002_L2_R2' in outfiles[1][1] + assert 'sub002_L3_R2' in outfiles[2][0] assert 'sub002_L3_R10' in outfiles[2][1] @@ -332,8 +333,9 @@ def test_datasink_substitutions(tmpdir): setattr(ds.inputs, '@outdir', files) ds.run() assert sorted([os.path.basename(x) for - x in glob.glob(os.path.join(str(outdir), '*'))]) \ - == ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns + x in glob.glob(os.path.join(str(outdir), '*'))]) \ + == ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns + @pytest.fixture() def _temp_analyze_files(tmpdir): @@ -356,6 +358,7 @@ def test_datasink_copydir_1(_temp_analyze_files, tmpdir): sep = os.path.sep assert tmpdir.join('basedir', pth.split(sep)[-1], fname).check() + def test_datasink_copydir_2(_temp_analyze_files, tmpdir): orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) @@ -420,8 +423,8 @@ def test_jsonsink_input(): @pytest.mark.parametrize("inputs_attributes", [ - {'new_entry' : 'someValue'}, - {'new_entry' : 'someValue', 'test' : 'testInfields'} + {'new_entry': 'someValue'}, + {'new_entry': 'someValue', 'test': 'testInfields'} ]) def test_jsonsink(tmpdir, inputs_attributes): tmpdir.chdir() @@ -437,6 +440,3 @@ def test_jsonsink(tmpdir, inputs_attributes): data = simplejson.load(f) assert data == expected_data - - - diff --git a/nipype/interfaces/tests/test_nilearn.py b/nipype/interfaces/tests/test_nilearn.py index ce3846a6d3..961c523020 100644 --- a/nipype/interfaces/tests/test_nilearn.py +++ b/nipype/interfaces/tests/test_nilearn.py @@ -19,6 +19,7 @@ except ImportError: pass + @pytest.mark.skipif(no_nilearn, reason="the nilearn library is not available") class TestSignalExtraction(): @@ -38,7 +39,6 @@ def setup_class(self, tmpdir_factory): utils.save_toy_nii(self.fake_fmri_data, self.filenames['in_file']) utils.save_toy_nii(self.fake_label_data, self.filenames['label_files']) - def test_signal_extract_no_shared(self): # run iface.SignalExtraction(in_file=self.filenames['in_file'], @@ -48,7 +48,6 @@ def test_signal_extract_no_shared(self): # assert self.assert_expected_output(self.labels, self.base_wanted) - def test_signal_extr_bad_label_list(self): # run with pytest.raises(ValueError): @@ -63,7 +62,8 @@ def test_signal_extr_equiv_4d_no_shared(self): def test_signal_extr_4d_no_shared(self): # set up & run & assert - self._test_4d_label(self.fourd_wanted, self.fake_4d_label_data, incl_shared_variance=False) + self._test_4d_label(self.fourd_wanted, self.fake_4d_label_data, + incl_shared_variance=False) def 
test_signal_extr_global_no_shared(self): # set up @@ -105,23 +105,24 @@ def test_signal_extr_shared(self): # run & assert self._test_4d_label(wanted, self.fake_4d_label_data) - def test_signal_extr_traits_valid(self): ''' Test a node using the SignalExtraction interface. Unlike interface.run(), node.run() checks the traits ''' # run - node = pe.Node(iface.SignalExtraction(in_file=os.path.abspath(self.filenames['in_file']), - label_files=os.path.abspath(self.filenames['label_files']), - class_labels=self.labels, - incl_shared_variance=False), - name='SignalExtraction') + node = pe.Node(iface.SignalExtraction( + in_file=os.path.abspath(self.filenames['in_file']), + label_files=os.path.abspath(self.filenames['label_files']), + class_labels=self.labels, + incl_shared_variance=False), + name='SignalExtraction') node.run() # assert # just checking that it passes trait validations - def _test_4d_label(self, wanted, fake_labels, include_global=False, incl_shared_variance=True): + def _test_4d_label(self, wanted, fake_labels, include_global=False, + incl_shared_variance=True): # set up utils.save_toy_nii(fake_labels, self.filenames['4d_label_file']) @@ -140,9 +141,9 @@ def _test_4d_label(self, wanted, fake_labels, include_global=False, incl_shared_ def assert_expected_output(self, labels, wanted): with open(self.filenames['out_file'], 'r') as output: got = [line.split() for line in output] - labels_got = got.pop(0) # remove header + labels_got = got.pop(0) # remove header assert labels_got == labels - assert len(got) == self.fake_fmri_data.shape[3],'num rows and num volumes' + assert len(got) == self.fake_fmri_data.shape[3], 'num rows and num volumes' # convert from string to float got = [[float(num) for num in row] for row in got] for i, time in enumerate(got): @@ -150,11 +151,11 @@ def assert_expected_output(self, labels, wanted): for j, segment in enumerate(time): npt.assert_almost_equal(segment, wanted[i][j], decimal=1) -#dj: self doesnt have orig_dir at this point, not sure how to change it. should work without it +# dj: self doesnt have orig_dir at this point, not sure how to change it. 
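# --- Editor's note: illustrative sketch, not part of the patch above ---
# The test_nilearn.py hunk above rewraps a long ``pe.Node(...)`` call to
# stay under 79 columns (E501): the call opens with a hanging indent and
# each keyword argument moves to its own continuation line. Sketch with
# hypothetical names (``node`` stands in for ``pe.Node``):
def node(interface, name):
    return (interface, name)


signal_extraction = node(
    interface=dict(
        in_file='/abs/path/fmri.nii',
        label_files='/abs/path/labels.nii',
        class_labels=['csf', 'gm', 'wm'],
        incl_shared_variance=False),
    name='SignalExtraction')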
+# should work without it # def teardown_class(self): # self.orig_dir.chdir() - fake_fmri_data = np.array([[[[2, -1, 4, -2, 3], [4, -2, -5, -1, 0]], @@ -201,7 +202,6 @@ def assert_expected_output(self, labels, wanted): [[0.3, 0.3, 0.4], [0.3, 0.4, 0.3]]]]) - fourd_wanted = [[-5.0652173913, -5.44565217391, 5.50543478261], [-7.02173913043, 11.1847826087, -4.33152173913], [-19.0869565217, 21.2391304348, -4.57608695652], diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py index ec744d9fce..7508978790 100644 --- a/nipype/interfaces/utility/base.py +++ b/nipype/interfaces/utility/base.py @@ -6,7 +6,8 @@ >>> tmp = getfixture('tmpdir') >>> old = tmp.chdir() """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range from future import standard_library diff --git a/nipype/interfaces/utility/csv.py b/nipype/interfaces/utility/csv.py index 0529a184a6..8d981ebca2 100644 --- a/nipype/interfaces/utility/csv.py +++ b/nipype/interfaces/utility/csv.py @@ -11,19 +11,23 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import zip, range, str, open from future import standard_library standard_library.install_aliases() -from ..base import traits, TraitedSpec, DynamicTraitedSpec, File, BaseInterface +from ..base import (traits, TraitedSpec, DynamicTraitedSpec, File, + BaseInterface) from ..io import add_traits class CSVReaderInputSpec(DynamicTraitedSpec, TraitedSpec): - in_file = File(exists=True, mandatory=True, desc='Input comma-seperated value (CSV) file') - header = traits.Bool(False, usedefault=True, desc='True if the first line is a column header') + in_file = File(exists=True, mandatory=True, + desc='Input comma-seperated value (CSV) file') + header = traits.Bool(False, usedefault=True, + desc='True if the first line is a column header') class CSVReader(BaseInterface): @@ -73,7 +77,8 @@ def _get_outfields(self): if self.inputs.header: self._outfields = tuple(entry) else: - self._outfields = tuple(['column_' + str(x) for x in range(len(entry))]) + self._outfields = tuple(['column_' + str(x) + for x in range(len(entry))]) return self._outfields def _run_interface(self, runtime): diff --git a/nipype/interfaces/utility/tests/test_base.py b/nipype/interfaces/utility/tests/test_base.py index 3e66f827d2..5561cf82b7 100644 --- a/nipype/interfaces/utility/tests/test_base.py +++ b/nipype/interfaces/utility/tests/test_base.py @@ -22,7 +22,9 @@ def test_rename(tmpdir): assert os.path.exists(outfile) # Now a string-formatting version - rn = utility.Rename(in_file="file.txt", format_string="%(field1)s_file%(field2)d", keep_ext=True) + rn = utility.Rename( + in_file="file.txt", format_string="%(field1)s_file%(field2)d", + keep_ext=True) # Test .input field creation assert hasattr(rn.inputs, "field1") assert hasattr(rn.inputs, "field2") @@ -37,8 +39,8 @@ def test_rename(tmpdir): @pytest.mark.parametrize("args, expected", [ - ({} , ([0], [1,2,3])), - ({"squeeze" : True}, (0 , [1,2,3])) + ({}, ([0], [1, 2, 3])), + ({"squeeze": True}, (0, [1, 2, 3])) ]) def test_split(tmpdir, args, expected): tmpdir.chdir() diff --git a/nipype/interfaces/utility/tests/test_wrappers.py b/nipype/interfaces/utility/tests/test_wrappers.py index b995dc27ad..ab545e076a 100644 --- 
a/nipype/interfaces/utility/tests/test_wrappers.py +++ b/nipype/interfaces/utility/tests/test_wrappers.py @@ -15,6 +15,7 @@ def concat_sort(in_arrays): return np.sort(all_vals) """ + def test_function(tmpdir): tmpdir.chdir() @@ -22,7 +23,10 @@ def gen_random_array(size): import numpy as np return np.random.rand(size, size) - f1 = pe.MapNode(utility.Function(input_names=['size'], output_names=['random_array'], function=gen_random_array), name='random_array', iterfield=['size']) + f1 = pe.MapNode( + utility.Function(input_names=['size'], output_names=['random_array'], + function=gen_random_array), + name='random_array', iterfield=['size']) f1.inputs.size = [2, 3, 5] wf = pe.Workflow(name="test_workflow") @@ -30,7 +34,9 @@ def gen_random_array(size): def increment_array(in_array): return in_array + 1 - f2 = pe.MapNode(utility.Function(function=increment_array), name='increment_array', iterfield=['in_array']) + f2 = pe.MapNode( + utility.Function(function=increment_array), + name='increment_array', iterfield=['in_array']) wf.connect(f1, 'random_array', f2, 'in_array') @@ -92,19 +98,20 @@ def _sum_and_sub_mul(a, b, c): def _inc(x): return x + 1 - params = pe.Node(utility.IdentityInterface(fields=['size', 'num']), name='params') - params.inputs.num = 42 + params = pe.Node(utility.IdentityInterface(fields=['size', 'num']), + name='params') + params.inputs.num = 42 params.inputs.size = 1 gen_tuple = pe.Node(utility.Function(input_names=['size'], output_names=['tuple'], function=_gen_tuple), - name='gen_tuple') + name='gen_tuple') ssm = pe.Node(utility.Function(input_names=['a', 'b', 'c'], output_names=['sum', 'sub'], function=_sum_and_sub_mul), - name='sum_and_sub_mul') + name='sum_and_sub_mul') split = pe.Node(utility.Split(splits=[1, 1], squeeze=True), @@ -116,7 +123,7 @@ def _inc(x): (gen_tuple, split, [("tuple", "inlist")]), (split, ssm, [(("out1", _inc), "a"), ("out2", "b"), - ]), + ]), ]) wf.run() diff --git a/nipype/interfaces/utility/wrappers.py b/nipype/interfaces/utility/wrappers.py index 9999c4af6a..aae94f63cc 100644 --- a/nipype/interfaces/utility/wrappers.py +++ b/nipype/interfaces/utility/wrappers.py @@ -7,7 +7,8 @@ >>> old = tmp.chdir() """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from future import standard_library standard_library.install_aliases() @@ -23,6 +24,7 @@ iflogger = logging.getLogger('interface') + class FunctionInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): function_str = traits.Str(mandatory=True, desc='code for function') @@ -138,7 +140,8 @@ def _run_interface(self, runtime): if len(self._output_names) == 1: self._out[self._output_names[0]] = out else: - if isinstance(out, tuple) and (len(out) != len(self._output_names)): + if isinstance(out, tuple) and \ + (len(out) != len(self._output_names)): raise RuntimeError('Mismatch in number of expected outputs') else: diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index e8928ae24a..2b1d63fbcb 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -9,16 +9,22 @@ >>> os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from ..base import CommandLineInputSpec, CommandLine, TraitedSpec, File class Vnifti2ImageInputSpec(CommandLineInputSpec): - in_file = File(exists=True, argstr='-in %s', 
mandatory=True, position=1, desc='in file') - attributes = File(exists=True, argstr='-attr %s', position=2, desc='attribute file') - out_file = File(name_template="%s.v", keep_extension=False, argstr='-out %s', hash_files=False, - position=-1, desc='output data file', name_source=["in_file"]) + in_file = File( + exists=True, argstr='-in %s', mandatory=True, position=1, + desc='in file') + attributes = File( + exists=True, argstr='-attr %s', position=2, desc='attribute file') + out_file = File( + name_template="%s.v", keep_extension=False, argstr='-out %s', + hash_files=False, position=-1, desc='output data file', + name_source=["in_file"]) class Vnifti2ImageOutputSpec(TraitedSpec): @@ -45,9 +51,13 @@ class Vnifti2Image(CommandLine): class VtoMatInputSpec(CommandLineInputSpec): - in_file = File(exists=True, argstr='-in %s', mandatory=True, position=1, desc='in file') - out_file = File(name_template="%s.mat", keep_extension=False, argstr='-out %s', hash_files=False, - position=-1, desc='output mat file', name_source=["in_file"]) + in_file = File( + exists=True, argstr='-in %s', mandatory=True, position=1, + desc='in file') + out_file = File( + name_template="%s.mat", keep_extension=False, argstr='-out %s', + hash_files=False, position=-1, desc='output mat file', + name_source=["in_file"]) class VtoMatOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py index 458d4fa7b5..c6f6a64305 100644 --- a/nipype/interfaces/vtkbase.py +++ b/nipype/interfaces/vtkbase.py @@ -6,7 +6,8 @@ Code using tvtk should import it through this module """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os from .. 
import logging diff --git a/nipype/pipeline/__init__.py b/nipype/pipeline/__init__.py index f4e865980c..badfda5ba0 100644 --- a/nipype/pipeline/__init__.py +++ b/nipype/pipeline/__init__.py @@ -5,6 +5,7 @@ Package contains modules for generating pipelines using interfaces """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) __docformat__ = 'restructuredtext' from .engine import Node, MapNode, JoinNode, Workflow diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 0c1d3748c0..07615d4164 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -14,7 +14,8 @@ os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import object from future import standard_library diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index ed1fde9d28..e79608a557 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -13,7 +13,8 @@ os.chdir(datadir) """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, str, bytes, open from collections import OrderedDict diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index 034174758a..3c5dd40aac 100644 --- a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -10,7 +10,8 @@ from builtins import open from copy import deepcopy from glob import glob -import os, sys +import os +import sys import networkx as nx @@ -24,6 +25,7 @@ class InputSpec(nib.TraitedSpec): input2 = nib.traits.Int(desc='a random int') input_file = nib.traits.File(desc='Random File') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') @@ -43,7 +45,8 @@ def _list_outputs(self): def test_init(): - with pytest.raises(TypeError): pe.Workflow() + with pytest.raises(TypeError): + pe.Workflow() pipe = pe.Workflow(name='pipe') assert type(pipe._graph) == nx.DiGraph @@ -73,9 +76,10 @@ def test_add_nodes(): # XXX - SG I'll create a graphical version of these tests and actually # ensure that all connections are tested later + @pytest.mark.parametrize("iterables, expected", [ - ({"1": None}, (1,0)), #test1 - ({"1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2])}, (4,0)) #test2 + ({"1": None}, (1, 0)), # test1 + ({"1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2])}, (4, 0)) # test2 ]) def test_1mod(iterables, expected): pipe = pe.Workflow(name='pipe') @@ -89,9 +93,9 @@ def test_1mod(iterables, expected): @pytest.mark.parametrize("iterables, expected", [ - ({"1": {}, "2": dict(input1=lambda: [1, 2])}, (3,2)), #test3 - ({"1": dict(input1=lambda: [1, 2]), "2": {}}, (4,2)), #test4 - ({"1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2])}, (6,4)) #test5 + ({"1": {}, "2": dict(input1=lambda: [1, 2])}, (3, 2)), # test3 + ({"1": dict(input1=lambda: [1, 2]), "2": {}}, (4, 2)), # test4 + ({"1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2])}, (6, 4)) # test5 ]) def test_2mods(iterables, expected): pipe = pe.Workflow(name='pipe') @@ -107,10 +111,10 @@ def test_2mods(iterables, expected): @pytest.mark.parametrize("iterables, 
expected, connect", [ - ({"1": {}, "2": dict(input1=lambda: [1, 2]), "3": {}}, (5,4), ("1-2","2-3")), #test6 - ({"1": dict(input1=lambda: [1, 2]), "2": {}, "3": {}}, (5,4), ("1-3","2-3")), #test7 + ({"1": {}, "2": dict(input1=lambda: [1, 2]), "3": {}}, (5, 4), ("1-2", "2-3")), # test6 + ({"1": dict(input1=lambda: [1, 2]), "2": {}, "3": {}}, (5, 4), ("1-3", "2-3")), # test7 ({"1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2]), "3": {}}, - (8,8), ("1-3","2-3")), #test8 + (8, 8), ("1-3", "2-3")), # test8 ]) def test_3mods(iterables, expected, connect): pipe = pe.Workflow(name='pipe') @@ -119,10 +123,10 @@ def test_3mods(iterables, expected, connect): mod3 = pe.Node(interface=EngineTestInterface(), name='mod3') for nr in ["1", "2", "3"]: setattr(eval("mod"+nr), "iterables", iterables[nr]) - if connect == ("1-2","2-3"): + if connect == ("1-2", "2-3"): pipe.connect([(mod1, mod2, [('output1', 'input2')]), (mod2, mod3, [('output1', 'input2')])]) - elif connect == ("1-3","2-3"): + elif connect == ("1-3", "2-3"): pipe.connect([(mod1, mod3, [('output1', 'input1')]), (mod2, mod3, [('output1', 'input2')])]) else: @@ -412,7 +416,8 @@ def test_doubleconnect(): def test_node_init(): - with pytest.raises(Exception): pe.Node() + with pytest.raises(Exception): + pe.Node() try: node = pe.Node(EngineTestInterface, name='test') except IOError: @@ -430,8 +435,10 @@ def test_workflow_add(): w1 = pe.Workflow(name='test') w1.connect(n1, 'a', n2, 'c') for node in [n1, n2, n3]: - with pytest.raises(IOError): w1.add_nodes([node]) - with pytest.raises(IOError): w1.connect([(w1, n2, [('n1.a', 'd')])]) + with pytest.raises(IOError): + w1.add_nodes([node]) + with pytest.raises(IOError): + w1.connect([(w1, n2, [('n1.a', 'd')])]) def test_node_get_output(): @@ -447,22 +454,25 @@ def test_mapnode_iterfield_check(): mod1 = pe.MapNode(EngineTestInterface(), iterfield=['input1'], name='mod1') - with pytest.raises(ValueError): mod1._check_iterfield() + with pytest.raises(ValueError): + mod1._check_iterfield() mod1 = pe.MapNode(EngineTestInterface(), iterfield=['input1', 'input2'], name='mod1') mod1.inputs.input1 = [1, 2] mod1.inputs.input2 = 3 - with pytest.raises(ValueError): mod1._check_iterfield() + with pytest.raises(ValueError): + mod1._check_iterfield() @pytest.mark.parametrize("x_inp, f_exp", [ (3, [6]), ([2, 3], [4, 6]), ((2, 3), [4, 6]), (range(3), [0, 2, 4]), - ("Str", ["StrStr"]), (["Str1", "Str2"], ["Str1Str1", "Str2Str2"]) + ("Str", ["StrStr"]), (["Str1", "Str2"], ["Str1Str1", "Str2Str2"]) ]) def test_mapnode_iterfield_type(x_inp, f_exp): from nipype import MapNode, Function + def double_func(x): return 2 * x double = Function(["x"], ["f_x"], double_func) @@ -470,7 +480,7 @@ def double_func(x): double_node = MapNode(double, name="double", iterfield=["x"]) double_node.inputs.x = x_inp - res = double_node.run() + res = double_node.run() assert res.outputs.f_x == f_exp @@ -664,7 +674,6 @@ def test_parameterize_dirs_false(tmpdir): wf.config['execution']['parameterize_dirs'] = False wf.connect([(n1, n2, [('output1', 'in1')])]) - wf.run() @@ -720,7 +729,7 @@ def test_write_graph_runs(tmpdir): except Exception: assert False, \ 'Failed to plot {} {} graph'.format( - 'simple' if simple else 'detailed', graph) + 'simple' if simple else 'detailed', graph) assert os.path.exists('graph.dot') or os.path.exists('graph_detailed.dot') try: @@ -752,7 +761,7 @@ def test_deep_nested_write_graph_runs(tmpdir): except Exception as e: assert False, \ 'Failed to plot {} {} deep graph: {!s}'.format( - 'simple' if simple else 
'detailed', graph, e) + 'simple' if simple else 'detailed', graph, e) assert os.path.exists('graph.dot') or os.path.exists('graph_detailed.dot') try: diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index a77745eb03..90da53ecd8 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -3,7 +3,8 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for join expansion """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import open import os @@ -198,8 +199,6 @@ def test_node_joinsource(tmpdir): """Test setting the joinsource to a Node.""" tmpdir.chdir() - # Make the workflow. - wf = pe.Workflow(name='test') # the iterated input node inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') inputspec.iterables = [('n', [1, 2])] @@ -209,7 +208,7 @@ def test_node_joinsource(tmpdir): # the joinsource is the inputspec name assert join.joinsource == inputspec.name, \ - "The joinsource is not set to the node name." + "The joinsource is not set to the node name." def test_set_join_node(tmpdir): @@ -258,7 +257,7 @@ def test_unique_join_node(tmpdir): wf.run() assert _sum_operands[0] == [4, 2, 3], \ - "The unique join output value is incorrect: %s." % _sum_operands[0] + "The unique join output value is incorrect: %s." % _sum_operands[0] def test_multiple_join_nodes(tmpdir): @@ -422,7 +421,7 @@ def test_synchronize_join_node(tmpdir): # thus, the expanded graph contains 2 * 2 iteration pre-join nodes, 1 join # node and 1 post-join node. assert len(result.nodes()) == 6, \ - "The number of expanded nodes is incorrect." + "The number of expanded nodes is incorrect." # the product inputs are [2, 3] and [4, 5] assert _products == [8, 15], \ "The post-join products is incorrect: %s." % _products @@ -478,7 +477,7 @@ def test_itersource_join_source_node(tmpdir): assert [16, 19] in _sum_operands, \ "The join Sum input is incorrect: %s." % _sum_operands assert [7, 9] in _sum_operands, \ - "The join Sum input is incorrect: %s." % _sum_operands + "The join Sum input is incorrect: %s." % _sum_operands def test_itersource_two_join_nodes(tmpdir): @@ -550,7 +549,7 @@ def test_nested_workflow_join(tmpdir): # Make the nested workflow def nested_wf(i, name='smallwf'): - #iterables with list of nums + # iterables with list of nums inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') inputspec.iterables = [('n', i)] # increment each iterable before joining @@ -558,18 +557,18 @@ def nested_wf(i, name='smallwf'): name='pre_join') # rejoin nums into list join = pe.JoinNode(IdentityInterface(fields=['n']), - joinsource='inputspec', - joinfield='n', - name='join') - #define and connect nested workflow - wf = pe.Workflow(name='wf_%d'%i[0]) + joinsource='inputspec', + joinfield='n', + name='join') + # define and connect nested workflow + wf = pe.Workflow(name='wf_%d' % i[0]) wf.connect(inputspec, 'n', pre_join, 'input1') wf.connect(pre_join, 'output1', join, 'n') return wf # master wf meta_wf = pe.Workflow(name='meta', base_dir='.') # add each mini-workflow to master - for i in [[1,3],[2,4]]: + for i in [[1, 3], [2, 4]]: mini_wf = nested_wf(i) meta_wf.add_nodes([mini_wf]) @@ -578,5 +577,3 @@ def nested_wf(i, name='smallwf'): # there should be six nodes in total assert len(result.nodes()) == 6, \ "The number of expanded nodes is incorrect." 
-
-
diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py
index 23c7a16fc6..9bce2f79fb 100644
--- a/nipype/pipeline/engine/tests/test_utils.py
+++ b/nipype/pipeline/engine/tests/test_utils.py
@@ -3,10 +3,12 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Tests for the engine utils module
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import range, open
 
-import os, sys
+import os
+import sys
 from copy import deepcopy
 from shutil import rmtree
 import pytest
@@ -24,7 +26,6 @@ def test_function(arg1, arg2, arg3):
         import numpy as np
         return (np.array(arg1) + arg2 + arg3).tolist()
 
-
     wf = pe.Workflow(name="testidentity", base_dir=tmpdir.strpath)
 
     n1 = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='src',
                  base_dir=tmpdir.strpath)
@@ -113,12 +114,12 @@ def test_function(arg1):
     n1.config = {'execution': {'remove_unnecessary_outputs': True}}
     n1.config = merge_dict(deepcopy(config._sections), n1.config)
     n1.run()
-    assert tmpdir.join(n1.name,'file1.txt').check()
-    assert tmpdir.join(n1.name,'file1.txt').check()
+    assert tmpdir.join(n1.name, 'file1.txt').check()
+    assert tmpdir.join(n1.name, 'file1.txt').check()
     n1.needed_outputs = ['file2']
     n1.run()
-    assert not tmpdir.join(n1.name,'file1.txt').check()
-    assert tmpdir.join(n1.name,'file2.txt').check()
+    assert not tmpdir.join(n1.name, 'file1.txt').check()
+    assert tmpdir.join(n1.name, 'file2.txt').check()
 
 
 class InputSpec(nib.TraitedSpec):
@@ -153,13 +154,13 @@ def test_inputs_removal(tmpdir):
     n1.config = {'execution': {'keep_inputs': True}}
     n1.config = merge_dict(deepcopy(config._sections), n1.config)
     n1.run()
-    assert tmpdir.join(n1.name,'file1.txt').check()
+    assert tmpdir.join(n1.name, 'file1.txt').check()
     n1.inputs.in_file = file1.strpath
     n1.config = {'execution': {'keep_inputs': False}}
     n1.config = merge_dict(deepcopy(config._sections), n1.config)
     n1.overwrite = True
     n1.run()
-    assert not tmpdir.join(n1.name,'file1.txt').check()
+    assert not tmpdir.join(n1.name, 'file1.txt').check()
 
 
 def test_outputs_removal_wf(tmpdir):
@@ -193,7 +194,6 @@ def test_function3(arg):
         import os
         return arg
 
-
     for plugin in ('Linear',):  # , 'MultiProc'):
         n1 = pe.Node(niu.Function(input_names=['arg1'],
                                   output_names=['out_file1', 'out_file2', 'dir'],
@@ -323,8 +323,8 @@ def dummy_func(value):
     return value + 1
 
 
-@pytest.mark.skipif(sys.version_info < (3,0),
-                    reason="the famous segfault #1788")
+@pytest.mark.skipif(sys.version_info < (3, 0),
+                    reason="the famous segfault #1788")
 def test_mapnode_crash(tmpdir):
     """Test mapnode crash when stop_on_first_crash is True"""
     cwd = os.getcwd()
@@ -342,8 +342,8 @@ def test_mapnode_crash(tmpdir):
     os.chdir(cwd)
 
 
-@pytest.mark.skipif(sys.version_info < (3,0),
-                    reason="the famous segfault #1788")
+@pytest.mark.skipif(sys.version_info < (3, 0),
+                    reason="the famous segfault #1788")
 def test_mapnode_crash2(tmpdir):
     """Test mapnode crash when stop_on_first_crash is False"""
     cwd = os.getcwd()
@@ -360,8 +360,8 @@ def test_mapnode_crash2(tmpdir):
     os.chdir(cwd)
 
 
-@pytest.mark.skipif(sys.version_info < (3,0),
-                    reason="the famous segfault #1788")
+@pytest.mark.skipif(sys.version_info < (3, 0),
+                    reason="the famous segfault #1788")
 def test_mapnode_crash3(tmpdir):
     """Test mapnode crash when mapnode is embedded in a workflow"""
     tmpdir.chdir()
@@ -374,7 +374,7 @@ def test_mapnode_crash3(tmpdir):
     wf = pe.Workflow('testmapnodecrash')
     wf.add_nodes([node])
     wf.base_dir = tmpdir.strpath
-    #changing crashdump dir to cwl (to avoid problems with read-only systems)
+    # changing crashdump dir to cwl (to avoid problems with read-only systems)
     wf.config["execution"]["crashdump_dir"] = os.getcwd()
     with pytest.raises(RuntimeError):
         wf.run(plugin='Linear')
diff --git a/nipype/pipeline/engine/tests/test_workflows.py b/nipype/pipeline/engine/tests/test_workflows.py
index 32b2fa3505..d2b23ae586 100644
--- a/nipype/pipeline/engine/tests/test_workflows.py
+++ b/nipype/pipeline/engine/tests/test_workflows.py
@@ -13,7 +13,7 @@ def test_duplicate_node_check():
 
     wf = pe.Workflow(name="testidentity")
 
-    original_list = [0,1,2,3,4,5,6,7,8,9]
+    original_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
 
     selector1 = pe.Node(niu.Select(), name="selector1")
     selector1.inputs.index = original_list[:-1]
@@ -26,10 +26,10 @@ def test_duplicate_node_check():
     selector4.inputs.index = original_list[:-4]
 
     wf_connections = [
-        (selector1, selector2, [("out","inlist")]),
-        (selector2, selector3, [("out","inlist")]),
-        (selector3, selector4, [("out","inlist")]),
-        ]
+        (selector1, selector2, [("out", "inlist")]),
+        (selector2, selector3, [("out", "inlist")]),
+        (selector3, selector4, [("out", "inlist")]),
+    ]
 
     with pytest.raises(IOError) as excinfo:
         wf.connect(wf_connections)
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index 61937faac3..0f11fd7ada 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -2,7 +2,8 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Utility routines for workflow graphs"""
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import str, open, next, zip, range
 
 import os
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index e00f105c5e..2880c48886 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -14,7 +14,8 @@
     os.chdir(datadir)
 """
 
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import str, bytes, open
 
 import os
diff --git a/nipype/pipeline/plugins/__init__.py b/nipype/pipeline/plugins/__init__.py
index 34d3abdebc..fd93146adb 100644
--- a/nipype/pipeline/plugins/__init__.py
+++ b/nipype/pipeline/plugins/__init__.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 from .debug import DebugPlugin
 from .linear import LinearPlugin
@@ -20,4 +21,4 @@
 from .slurm import SLURMPlugin
 from .slurmgraph import SLURMGraphPlugin
 
-from . import semaphore_singleton
+from . import semaphore_singleton
diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py
index ec8c68a148..8e63bd4add 100644
--- a/nipype/pipeline/plugins/base.py
+++ b/nipype/pipeline/plugins/base.py
@@ -3,7 +3,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Common graph operations for execution
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import range, object, open
 
 from copy import deepcopy
diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py
index 0548a7afbc..68b92625e1 100644
--- a/nipype/pipeline/plugins/condor.py
+++ b/nipype/pipeline/plugins/condor.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via Condor
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 import os
 from time import sleep
@@ -102,7 +103,7 @@ def _submit_batchtask(self, scriptfile, node):
             except Exception as e:
                 if tries < self._max_tries:
                     tries += 1
-                    sleep(self._retry_timeout)  # sleep 2 seconds and try again.
+                    sleep(self._retry_timeout)  # sleep 2 seconds and try again
                 else:
                     iflogger.setLevel(oldlevel)
                     raise RuntimeError('\n'.join((('Could not submit condor '
diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py
index ce2a2a5592..f413d72c82 100644
--- a/nipype/pipeline/plugins/dagman.py
+++ b/nipype/pipeline/plugins/dagman.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via Condor DAGMan
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open
 
 import os
diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py
index 7c8fd451aa..9921bb9cf4 100644
--- a/nipype/pipeline/plugins/debug.py
+++ b/nipype/pipeline/plugins/debug.py
@@ -3,7 +3,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Debug plugin
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 import networkx as nx
 from .base import PluginBase, logger
diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py
index b19b4221f6..5f2d697c1c 100644
--- a/nipype/pipeline/plugins/ipython.py
+++ b/nipype/pipeline/plugins/ipython.py
@@ -3,7 +3,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Parallel workflow execution via IPython controller
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 from future import standard_library
 standard_library.install_aliases()
@@ -22,7 +23,6 @@
     IPython_not_loaded = True
 
 
-
 def execute_task(pckld_task, node_config, updatehash):
     from socket import gethostname
     from traceback import format_exc
@@ -80,12 +80,14 @@ def run(self, graph, config, updatehash=False):
             if isinstance(e, TimeoutError):
                 raise_from(Exception("No IPython clients found."), e)
             if isinstance(e, IOError):
-                raise_from(Exception("ipcluster/ipcontroller has not been started"), e)
+                raise_from(Exception(
+                    "ipcluster/ipcontroller has not been started"), e)
             if isinstance(e, ValueError):
                 raise_from(Exception("Ipython kernel not installed"), e)
             else:
                 raise e
-        return super(IPythonPlugin, self).run(graph, config, updatehash=updatehash)
+        return super(IPythonPlugin, self).run(graph, config,
+                                              updatehash=updatehash)
 
     def _get_result(self, taskid):
         if taskid not in self.taskmap:
@@ -102,10 +104,8 @@ def _get_result(self, taskid):
 
     def _submit_job(self, node, updatehash=False):
         pckld_node = dumps(node, 2)
-        result_object = self.taskclient.load_balanced_view().apply(execute_task,
-                                                                   pckld_node,
-                                                                   node.config,
-                                                                   updatehash)
+        result_object = self.taskclient.load_balanced_view().apply(
+            execute_task, pckld_node, node.config, updatehash)
         self._taskid += 1
         self.taskmap[self._taskid] = result_object
         return self._taskid
diff --git a/nipype/pipeline/plugins/ipythonx.py b/nipype/pipeline/plugins/ipythonx.py
index d76cdfeb98..1aec39fff7 100644
--- a/nipype/pipeline/plugins/ipythonx.py
+++ b/nipype/pipeline/plugins/ipythonx.py
@@ -3,7 +3,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Parallel workflow execution via IPython controller
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 import sys
 from future.utils import raise_from
@@ -47,8 +48,8 @@ def run(self, graph, config, updatehash=False):
             __import__(name)
             self.ipyclient = sys.modules[name]
         except ImportError as e:
-            raise_from(ImportError("Ipython kernel not found. Parallel execution "
-                                   "will be unavailable"), e)
+            raise_from(ImportError("Ipython kernel not found. Parallel "
+                                   "execution will be unavailable"), e)
         try:
             self.taskclient = self.ipyclient.TaskClient()
         except Exception as e:
@@ -56,7 +57,8 @@ def run(self, graph, config, updatehash=False):
                 raise_from(Exception("No IPython clients found."), e)
             if isinstance(e, ValueError):
                 raise_from(Exception("Ipython kernel not installed"), e)
-        return super(IPythonXPlugin, self).run(graph, config, updatehash=updatehash)
+        return super(IPythonXPlugin, self).run(graph, config,
+                                               updatehash=updatehash)
 
     def _get_result(self, taskid):
         return self.taskclient.get_task_result(taskid, block=False)
diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py
index bdb61b8c44..67d5e25a43 100644
--- a/nipype/pipeline/plugins/linear.py
+++ b/nipype/pipeline/plugins/linear.py
@@ -3,7 +3,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Local serial workflow execution
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 import os
diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py
index 5ee0483221..51b349529a 100644
--- a/nipype/pipeline/plugins/lsf.py
+++ b/nipype/pipeline/plugins/lsf.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via LSF
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 import os
 import re
@@ -42,10 +43,11 @@ def __init__(self, **kwargs):
         super(LSFPlugin, self).__init__(template, **kwargs)
 
     def _is_pending(self, taskid):
-        """LSF lists a status of 'PEND' when a job has been submitted but is waiting to be picked up,
-        and 'RUN' when it is actively being processed. But _is_pending should return True until a job has
-        finished and is ready to be checked for completeness. So return True if status is either 'PEND'
-        or 'RUN'"""
+        """LSF lists a status of 'PEND' when a job has been submitted but is
+        waiting to be picked up, and 'RUN' when it is actively being processed.
+        But _is_pending should return True until a job has finished and is
+        ready to be checked for completeness. So return True if status is
+        either 'PEND' or 'RUN'"""
         cmd = CommandLine('bjobs',
                           resource_monitor=False,
                           terminal_output='allatonce')
@@ -65,7 +67,6 @@ def _submit_batchtask(self, scriptfile, node):
         cmd = CommandLine('bsub', environ=dict(os.environ),
                           resource_monitor=False,
                           terminal_output='allatonce')
-        path = os.path.dirname(scriptfile)
         bsubargs = ''
         if self._bsub_args:
             bsubargs = self._bsub_args
@@ -78,7 +79,8 @@ def _submit_batchtask(self, scriptfile, node):
         if '-o' not in bsubargs:  # -o outfile
             bsubargs = '%s -o %s' % (bsubargs, scriptfile + ".log")
         if '-e' not in bsubargs:
-            bsubargs = '%s -e %s' % (bsubargs, scriptfile + ".log")  # -e error file
+            # -e error file
+            bsubargs = '%s -e %s' % (bsubargs, scriptfile + ".log")
         if node._hierarchy:
             jobname = '.'.join((dict(os.environ)['LOGNAME'],
                                 node._hierarchy,
@@ -117,8 +119,9 @@ def _submit_batchtask(self, scriptfile, node):
             if match:
                 taskid = int(match.groups()[0])
             else:
-                raise ScriptError("Can't parse submission job output id: %s" %
-                                  result.runtime.stdout)
+                raise ScriptError(
+                    "Can't parse submission job output id: %s" %
+                    result.runtime.stdout)
             self._pending[taskid] = node.output_dir()
             logger.debug('submitted lsf task: %d for node %s' % (taskid, node._id))
             return taskid
diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py
index 86c021decd..61f603186d 100644
--- a/nipype/pipeline/plugins/multiproc.py
+++ b/nipype/pipeline/plugins/multiproc.py
@@ -6,7 +6,8 @@
 Support for child processes running as non-daemons based on
 http://stackoverflow.com/a/8963618/1183453
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 # Import packages
 from multiprocessing import Process, Pool, cpu_count, pool
@@ -238,10 +239,12 @@ def _send_procs_to_workers(self, updatehash=False, graph=None):
             tasks_list_msg = '\nCurrently running:\n'
             tasks_list_msg += '\n'.join(running_tasks)
             tasks_list_msg = indent(tasks_list_msg, ' ' * 21)
-        logger.info('[MultiProc] Running %d tasks, and %d jobs ready. Free '
-                    'memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s',
-                    len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb,
-                    free_processors, self.processors, tasks_list_msg)
+        logger.info(
+            '[MultiProc] Running %d tasks, and %d jobs ready. Free '
+            'memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s',
+            len(self.pending_tasks), len(jobids), free_memory_gb,
+            self.memory_gb, free_processors, self.processors,
+            tasks_list_msg)
         self._stats = stats
 
         if free_memory_gb < 0.01 or free_processors == 0:
diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py
index e3f5ef7947..7dcbfb9fd6 100644
--- a/nipype/pipeline/plugins/oar.py
+++ b/nipype/pipeline/plugins/oar.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via OAR http://oar.imag.fr
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import str, open
 
 import os
@@ -15,6 +16,7 @@
 from .base import SGELikeBatchManagerBase, logger
 
 iflogger = logging.getLogger('interface')
 
+
 class OARPlugin(SGELikeBatchManagerBase):
     """Execute using OAR
diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py
index 6154abad74..cefd12dcda 100644
--- a/nipype/pipeline/plugins/pbs.py
+++ b/nipype/pipeline/plugins/pbs.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via PBS/Torque
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import str, open
 
 import os
@@ -43,7 +44,8 @@ def __init__(self, **kwargs):
             if 'max_tries' in kwargs['plugin_args']:
                 self._max_tries = kwargs['plugin_args']['max_tries']
             if 'max_jobname_len' in kwargs['plugin_args']:
-                self._max_jobname_len = kwargs['plugin_args']['max_jobname_len']
+                self._max_jobname_len = kwargs[
+                    'plugin_args']['max_jobname_len']
         super(PBSPlugin, self).__init__(template, **kwargs)
 
     def _is_pending(self, taskid):
@@ -52,7 +54,6 @@ def _is_pending(self, taskid):
                              terminal_output='allatonce',
                              resource_monitor=False,
                              ignore_exception=True).run()
-        stderr = result.runtime.stderr
         errmsg = 'Unknown Job Id'  # %s' % taskid
         success = 'Job has finished'
         if success in e:  # Fix for my PBS
@@ -102,17 +103,20 @@ def _submit_batchtask(self, scriptfile, node):
             except Exception as e:
                 if tries < self._max_tries:
                     tries += 1
-                    sleep(self._retry_timeout)  # sleep 2 seconds and try again.
+                    # sleep 2 seconds and try again.
+                    sleep(self._retry_timeout)
                 else:
                     iflogger.setLevel(oldlevel)
                     raise RuntimeError(
-                        'Could not submit pbs task for node {}\n{}'.format(node._id, e))
+                        'Could not submit pbs task for node {}\n{}'.format(
+                            node._id, e))
             else:
                 break
         iflogger.setLevel(oldlevel)
         # retrieve pbs taskid
         taskid = result.runtime.stdout.split('.')[0]
         self._pending[taskid] = node.output_dir()
-        logger.debug('submitted pbs task: {} for node {}'.format(taskid, node._id))
+        logger.debug(
+            'submitted pbs task: {} for node {}'.format(taskid, node._id))
         return taskid
diff --git a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py
index 719b82578c..3f3b64c592 100644
--- a/nipype/pipeline/plugins/pbsgraph.py
+++ b/nipype/pipeline/plugins/pbsgraph.py
@@ -1,6 +1,7 @@
 """Parallel workflow execution via PBS/Torque
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open
 
 import os
diff --git a/nipype/pipeline/plugins/semaphore_singleton.py b/nipype/pipeline/plugins/semaphore_singleton.py
index 786026a695..96dfe657bd 100644
--- a/nipype/pipeline/plugins/semaphore_singleton.py
+++ b/nipype/pipeline/plugins/semaphore_singleton.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 import threading
 semaphore = threading.Semaphore(0)
diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py
index 42aa4bc915..44fb0e6b64 100644
--- a/nipype/pipeline/plugins/sge.py
+++ b/nipype/pipeline/plugins/sge.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via SGE
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 from builtins import object
diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py
index 882c455450..fc8097fdd9 100644
--- a/nipype/pipeline/plugins/sgegraph.py
+++ b/nipype/pipeline/plugins/sgegraph.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via SGE
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open
 
 import os
@@ -123,7 +124,7 @@ def make_job_name(jobnumber, nodeslist):
                     values = ' '
                     for jobid in dependencies[idx]:
                         # Avoid dependancies of done jobs
-                        if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False:
+                        if not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid]:
                             values += "${{{0}}},".format(make_job_name(jobid, nodes))
                     if values != ' ':  # i.e. if some jobs were added to dependency list
                         values = values.rstrip(',')
diff --git a/nipype/pipeline/plugins/slurm.py b/nipype/pipeline/plugins/slurm.py
index e5b797da5d..e1bc50166a 100644
--- a/nipype/pipeline/plugins/slurm.py
+++ b/nipype/pipeline/plugins/slurm.py
@@ -5,7 +5,8 @@
 Parallel workflow execution with SLURM
 '''
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open
 
 import os
@@ -89,9 +90,11 @@ def _submit_batchtask(self, scriptfile, node):
             else:
                 sbatch_args += (" " + node.plugin_args['sbatch_args'])
         if '-o' not in sbatch_args:
-            sbatch_args = '%s -o %s' % (sbatch_args, os.path.join(path, 'slurm-%j.out'))
+            sbatch_args = '%s -o %s' % (sbatch_args,
+                                        os.path.join(path, 'slurm-%j.out'))
         if '-e' not in sbatch_args:
-            sbatch_args = '%s -e %s' % (sbatch_args, os.path.join(path, 'slurm-%j.out'))
+            sbatch_args = '%s -e %s' % (sbatch_args,
+                                        os.path.join(path, 'slurm-%j.out'))
         if node._hierarchy:
             jobname = '.'.join((dict(os.environ)['LOGNAME'],
                                 node._hierarchy,
@@ -114,12 +117,14 @@ def _submit_batchtask(self, scriptfile, node):
             except Exception as e:
                 if tries < self._max_tries:
                     tries += 1
-                    sleep(self._retry_timeout)  # sleep 2 seconds and try again.
+                    # sleep 2 seconds and try again.
+                    sleep(self._retry_timeout)
                 else:
                     iflogger.setLevel(oldlevel)
-                    raise RuntimeError('\n'.join((('Could not submit sbatch task'
-                                                   ' for node %s') % node._id,
-                                                  str(e))))
+                    raise RuntimeError('\n'.join(((
+                        'Could not submit sbatch task'
+                        ' for node %s') % node._id,
+                        str(e))))
             else:
                 break
         logger.debug('Ran command ({0})'.format(cmd.cmdline))
@@ -129,6 +134,6 @@ def _submit_batchtask(self, scriptfile, node):
         taskid = int(re.match(self._jobid_re, lines[-1]).groups()[0])
         self._pending[taskid] = node.output_dir()
-        logger.debug('submitted sbatch task: %d for node %s' % (taskid, node._id))
+        logger.debug('submitted sbatch task: %d for node %s' %
+                     (taskid, node._id))
         return taskid
-
diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py
index ed571ecffe..bfb1eda1b0 100644
--- a/nipype/pipeline/plugins/slurmgraph.py
+++ b/nipype/pipeline/plugins/slurmgraph.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via SLURM
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open
 
 import os
@@ -122,7 +123,7 @@ def make_job_name(jobnumber, nodeslist):
                     values = ''
                     for jobid in dependencies[idx]:
                         # Avoid dependancies of done jobs
-                        if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False:
+                        if not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid]:
                             values += "${{{0}}}:".format(make_job_name(jobid, nodes))
                     if values != '':  # i.e. if some jobs were added to dependency list
                         values = values.rstrip(':')
diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py
index f384e35adf..67a1ab6bc5 100644
--- a/nipype/pipeline/plugins/somaflow.py
+++ b/nipype/pipeline/plugins/somaflow.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 """Parallel workflow execution via PBS/Torque
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 import os
 import sys
diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py
index f8838e691a..49928cfe4b 100644
--- a/nipype/pipeline/plugins/tests/test_base.py
+++ b/nipype/pipeline/plugins/tests/test_base.py
@@ -6,12 +6,14 @@
 import numpy as np
 import scipy.sparse as ssp
 
+
 def test_scipy_sparse():
     foo = ssp.lil_matrix(np.eye(3, k=1))
     goo = foo.getrowview(0)
     goo[goo.nonzero()] = 0
     assert foo[0, 1] == 0
 
+
 '''
 Can use the following code to test that a mapnode crash continues successfully
 Need to put this into a nose-test with a timeout
@@ -26,7 +28,8 @@ def func(arg1):
         raise Exception('arg cannot be ' + str(arg1))
     return arg1
 
-funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'], output_names=['out']),
+funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'],
+                                    output_names=['out']),
                        iterfield=['arg1'],
                        name = 'functor')
 funkynode.inputs.arg1 = [1,2]
diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py
index 7212ff7302..bbaeb34670 100644
--- a/nipype/pipeline/plugins/tests/test_callback.py
+++ b/nipype/pipeline/plugins/tests/test_callback.py
@@ -68,6 +68,7 @@ def test_callback_exception(tmpdir):
     assert so.statuses[0][1] == 'start'
     assert so.statuses[1][1] == 'exception'
 
+
 def test_callback_multiproc_normal(tmpdir):
     tmpdir.chdir()
 
@@ -86,6 +87,7 @@ def test_callback_multiproc_normal(tmpdir):
     assert so.statuses[0][1] == 'start'
     assert so.statuses[1][1] == 'end'
 
+
 def test_callback_multiproc_exception(tmpdir):
     tmpdir.chdir()
 
@@ -107,4 +109,3 @@ def test_callback_multiproc_exception(tmpdir):
     assert n.name == 'f_node'
     assert so.statuses[0][1] == 'start'
     assert so.statuses[1][1] == 'exception'
-
diff --git a/nipype/pipeline/plugins/tests/test_debug.py b/nipype/pipeline/plugins/tests/test_debug.py
index e7997ba7f0..0b6bbc7150 100644
--- a/nipype/pipeline/plugins/tests/test_debug.py
+++ b/nipype/pipeline/plugins/tests/test_debug.py
@@ -46,7 +46,8 @@ def test_debug(tmpdir):
     mod1.inputs.input1 = 1
 
     run_wf = lambda: pipe.run(plugin="Debug")
-    with pytest.raises(ValueError): run_wf()
+    with pytest.raises(ValueError):
+        run_wf()
 
     exc = None
     try:
diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py
index afb916f6eb..3b95379bf2 100644
--- a/nipype/pipeline/plugins/tests/test_linear.py
+++ b/nipype/pipeline/plugins/tests/test_linear.py
@@ -40,7 +40,8 @@ def test_run_in_series(tmpdir):
     pipe.base_dir = os.getcwd()
     mod1.inputs.input1 = 1
     execgraph = pipe.run(plugin="Linear")
-    names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
+    names = ['.'.join((node._hierarchy, node.name))
+             for node in execgraph.nodes()]
     node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
diff --git a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
index 7112aa2448..0c737534a9 100644
--- a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
+++ b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
@@ -3,11 +3,13 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Testing module for functions and classes from multiproc.py
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import range, open
 
 # Import packages
-import os, sys
+import os
+import sys
 from tempfile import mkdtemp
 from shutil import rmtree
 import pytest
@@ -89,6 +91,7 @@ def dummyFunction(filename):
 
     return total
 
+
 def run_multiproc_nondaemon_with_flag(nondaemon_flag):
     '''
     Start a pipe with two nodes using the resource multiproc plugin and
@@ -116,26 +119,30 @@ def run_multiproc_nondaemon_with_flag(nondaemon_flag):
 
     pipe.config['execution']['stop_on_first_crash'] = True
 
-    # execute the pipe using the MultiProc plugin with 2 processes and the non_daemon flag
-    # to enable child processes which start other multiprocessing jobs
+    # execute the pipe using the MultiProc plugin with 2 processes and the
+    # non_daemon flag to enable child processes which start other
+    # multiprocessing jobs
     execgraph = pipe.run(plugin="MultiProc",
                          plugin_args={'n_procs': 2,
                                       'non_daemon': nondaemon_flag})
 
-    names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
+    names = ['.'.join((node._hierarchy, node.name))
+             for node in execgraph.nodes()]
    node = list(execgraph.nodes())[names.index('pipe.f2')]
     result = node.get_output('sum_out')
     os.chdir(cur_dir)
     rmtree(temp_dir)
     return result
 
+
 def test_run_multiproc_nondaemon_false():
     '''
-    This is the entry point for the test. Two times a pipe of several multiprocessing jobs gets
-    executed. First, without the nondaemon flag. Second, with the nondaemon flag.
+    This is the entry point for the test. Two times a pipe of several
+    multiprocessing jobs gets executed. First, without the nondaemon flag.
+    Second, with the nondaemon flag.
 
-    Since the processes of the pipe start child processes, the execution only succeeds when the
-    non_daemon flag is on.
+    Since the processes of the pipe start child processes, the execution only
+    succeeds when the non_daemon flag is on.
     '''
     shouldHaveFailed = False
     try:
@@ -145,6 +152,7 @@ def test_run_multiproc_nondaemon_false():
         shouldHaveFailed = True
     assert shouldHaveFailed
 
+
 def test_run_multiproc_nondaemon_true():
     # with nondaemon_flag = True, the execution should succeed
     result = run_multiproc_nondaemon_with_flag(True)
diff --git a/nipype/pipeline/plugins/tests/test_pbs.py b/nipype/pipeline/plugins/tests/test_pbs.py
index 719ffbfc72..98a0dc0473 100644
--- a/nipype/pipeline/plugins/tests/test_pbs.py
+++ b/nipype/pipeline/plugins/tests/test_pbs.py
@@ -47,7 +47,8 @@ def test_run_pbsgraph():
     pipe.base_dir = os.getcwd()
     mod1.inputs.input1 = 1
     execgraph = pipe.run(plugin="PBSGraph")
-    names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
+    names = ['.'.join((node._hierarchy, node.name))
+             for node in execgraph.nodes()]
     node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py b/nipype/pipeline/plugins/tests/test_somaflow.py
index 7449d0d3ae..d413f38ec7 100644
--- a/nipype/pipeline/plugins/tests/test_somaflow.py
+++ b/nipype/pipeline/plugins/tests/test_somaflow.py
@@ -45,7 +45,8 @@ def test_run_somaflow(tmpdir):
     pipe.base_dir = os.getcwd()
     mod1.inputs.input1 = 1
     execgraph = pipe.run(plugin="SomaFlow")
-    names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()]
+    names = ['.'.join((node._hierarchy, node.name))
+             for node in execgraph.nodes()]
     node = list(execgraph.nodes())[names.index('pipe.mod1')]
     result = node.get_output('output1')
     assert result == [1, 1]
diff --git a/nipype/pipeline/plugins/tests/test_tools.py b/nipype/pipeline/plugins/tests/test_tools.py
index 479cc773df..49b2de63dc 100644
--- a/nipype/pipeline/plugins/tests/test_tools.py
+++ b/nipype/pipeline/plugins/tests/test_tools.py
@@ -11,26 +11,31 @@
 
 from nipype.pipeline.plugins.tools import report_crash
 
+
 def test_report_crash():
     with mock.patch('pickle.dump', mock.MagicMock()) as mock_pickle_dump:
-        with mock.patch('nipype.pipeline.plugins.tools.format_exception', mock.MagicMock()):  # see iss 1517
+        with mock.patch('nipype.pipeline.plugins.tools.format_exception',
+                        mock.MagicMock()):  # see iss 1517
             mock_pickle_dump.return_value = True
             mock_node = mock.MagicMock(name='mock_node')
             mock_node._id = 'an_id'
             mock_node.config = {
-                'execution' : {
-                    'crashdump_dir' : '.',
-                    'crashfile_format' : 'pklz',
+                'execution': {
+                    'crashdump_dir': '.',
+                    'crashfile_format': 'pklz',
                 }
             }
 
             actual_crashfile = report_crash(mock_node)
 
-            expected_crashfile = re.compile('.*/crash-.*-an_id-[0-9a-f\-]*.pklz')
+            expected_crashfile = re.compile(
+                '.*/crash-.*-an_id-[0-9a-f\-]*.pklz')
 
-            assert expected_crashfile.match(actual_crashfile).group() == actual_crashfile
+            assert expected_crashfile.match(
+                actual_crashfile).group() == actual_crashfile
             assert mock_pickle_dump.call_count == 1
 
+
 '''
 Can use the following code to test that a mapnode crash continues successfully
 Need to put this into a nose-test with a timeout
@@ -45,7 +50,8 @@ def func(arg1):
         raise Exception('arg cannot be ' + str(arg1))
     return arg1
 
-funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'], output_names=['out']),
+funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'],
+                                    output_names=['out']),
                        iterfield=['arg1'],
                        name = 'functor')
 funkynode.inputs.arg1 = [1,2]
diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py
index c07a8966b6..5df106a8a4 100644
--- a/nipype/pipeline/plugins/tools.py
+++ b/nipype/pipeline/plugins/tools.py
@@ -3,7 +3,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Common graph operations for execution
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import open
 
 import os
diff --git a/nipype/pkg_info.py b/nipype/pkg_info.py
index f1323750a1..3dab459017 100644
--- a/nipype/pkg_info.py
+++ b/nipype/pkg_info.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from future import standard_library
 standard_library.install_aliases()
 
@@ -13,11 +14,12 @@
 COMMIT_INFO_FNAME = 'COMMIT_INFO.txt'
 PY3 = sys.version_info[0] >= 3
 
+
 def pkg_commit_hash(pkg_path):
     ''' Get short form of commit hash given directory `pkg_path`
 
     There should be a file called 'COMMIT_INFO.txt' in `pkg_path`.  This is a
-    file in INI file format, with at least one section: ``commit hash``, and two
+    file in INI file format, with at least one section: ``commit hash`` and two
     variables ``archive_subst_hash`` and ``install_hash``.  The first has a
     substitution pattern in it which may have been filled by the execution of
     ``git archive`` if this is an archive generated that way.  The second is
diff --git a/nipype/scripts/__init__.py b/nipype/scripts/__init__.py
index 8b13789179..e69de29bb2 100644
--- a/nipype/scripts/__init__.py
+++ b/nipype/scripts/__init__.py
@@ -1 +0,0 @@
-
diff --git a/nipype/scripts/cli.py b/nipype/scripts/cli.py
index 520030741e..3818d5c96c 100644
--- a/nipype/scripts/cli.py
+++ b/nipype/scripts/cli.py
@@ -17,6 +17,7 @@
 
 from .. import __version__
 
+
 # declare the CLI group
 @click.group(context_settings=CONTEXT_SETTINGS)
 def cli():
@@ -25,7 +26,8 @@ def cli():
 
 @cli.command(context_settings=CONTEXT_SETTINGS)
 @click.argument('logdir', type=ExistingDirPath, callback=check_not_none)
-@click.option('-r', '--regex', type=RegularExpression(), callback=check_not_none,
+@click.option('-r', '--regex', type=RegularExpression(),
+              callback=check_not_none,
               help='Regular expression to be searched in each traceback.')
 def search(logdir, regex):
     """Search for tracebacks content.
@@ -152,11 +154,13 @@ def run(ctx, module, interface, list, help):
         args = iface_parser.parse_args(args=ctx.args)
         run_instance(node, args)
 
+
 @cli.command(context_settings=CONTEXT_SETTINGS)
 def version():
     """Print current version of Nipype."""
     click.echo(__version__)
 
+
 @cli.group()
 def convert():
     """Export nipype interfaces to other formats."""
@@ -171,13 +175,16 @@ def convert():
               help="Module where the interface is defined.")
 @click.option("-o", "--output", type=UnexistingFilePath, required=True,
               callback=check_not_none,
-              help="JSON file name where the Boutiques descriptor will be written.")
+              help="JSON file name where the Boutiques descriptor will be "
+              "written.")
 @click.option("-t", "--ignored-template-inputs", type=str, multiple=True,
               help="Interface inputs ignored in path template creations.")
 @click.option("-d", "--docker-image", type=str,
-              help="Name of the Docker image where the Nipype interface is available.")
+              help="Name of the Docker image where the Nipype interface is "
+              "available.")
 @click.option("-r", "--docker-index", type=str,
-              help="Docker index where the Docker image is stored (e.g. http://index.docker.io).")
+              help="Docker index where the Docker image is stored (e.g. "
+              "http://index.docker.io).")
 @click.option("-n", "--ignore-template-numbers", is_flag=True, flag_value=True,
               help="Ignore all numbers in path template creations.")
 @click.option("-v", "--verbose", is_flag=True, flag_value=True,
diff --git a/nipype/scripts/crash_files.py b/nipype/scripts/crash_files.py
index 363e0abf80..b7b83dff5c 100644
--- a/nipype/scripts/crash_files.py
+++ b/nipype/scripts/crash_files.py
@@ -68,7 +68,7 @@ def display_crash_file(crashfile, rerun, debug, directory):
     print("\n")
     print("Traceback: ")
     print(''.join(tb))
-    print ("\n")
+    print("\n")
 
     if rerun:
         if node is None:
diff --git a/nipype/scripts/instance.py b/nipype/scripts/instance.py
index 959bb6b82a..52cc40a21d 100644
--- a/nipype/scripts/instance.py
+++ b/nipype/scripts/instance.py
@@ -14,8 +14,8 @@ def import_module(module_path):
     absolute or relative terms (e.g. either pkg.mod or ..mod).
     If the name is specified in relative terms, then the package argument
     must be set to the name of the package which is to act as the anchor
-    for resolving the package name (e.g. import_module('..mod', 'pkg.subpkg')
-    will import pkg.mod).
+    for resolving the package name (e.g. import_module('..mod',
+    'pkg.subpkg') will import pkg.mod).
 
     Parameters
     ----------
@@ -29,7 +29,8 @@ def import_module(module_path):
     try:
         mod = importlib.import_module(module_path)
     except:
-        raise ImportError('Error when importing object {}.'.format(module_path))
+        raise ImportError('Error when importing object {}.'.format(
+            module_path))
     else:
         return mod
diff --git a/nipype/scripts/utils.py b/nipype/scripts/utils.py
index e35f4d464e..3a2e77c871 100644
--- a/nipype/scripts/utils.py
+++ b/nipype/scripts/utils.py
@@ -2,7 +2,8 @@
 """
 Utilities for the CLI functions.
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 from builtins import bytes, str
 
@@ -20,7 +21,7 @@
                         ignore_unknown_options=True)
 
 # specification of existing ParamTypes
-ExistingDirPath = click.Path(exists=True, file_okay=False, resolve_path=True)
+ExistingDirPath = click.Path(exists=True, file_okay=False, resolve_path=True)
 ExistingFilePath = click.Path(exists=True, dir_okay=False, resolve_path=True)
 UnexistingFilePath = click.Path(dir_okay=False, resolve_path=True)
 
@@ -40,7 +41,8 @@ def convert(self, value, param, ctx):
         try:
             rex = re.compile(value, re.IGNORECASE)
         except ValueError:
-            self.fail('%s is not a valid regular expression.' % value, param, ctx)
+            self.fail('%s is not a valid regular expression.' % value, param,
+                      ctx)
         else:
             return rex
@@ -61,7 +63,8 @@ def add_args_options(arg_parser, interface):
     """Add arguments to `arg_parser` to create a CLI for `interface`."""
     inputs = interface.input_spec()
     for name, spec in sorted(interface.inputs.traits(transient=None).items()):
-        desc = "\n".join(interface._get_trait_desc(inputs, name, spec))[len(name) + 2:]
+        desc = "\n".join(interface._get_trait_desc(
+            inputs, name, spec))[len(name) + 2:]
         # Escape any % signs with a %
         desc = desc.replace('%', '%%')
         args = {}
diff --git a/nipype/sphinxext/__init__.py b/nipype/sphinxext/__init__.py
index b2033960f3..5f8ba4da2b 100644
--- a/nipype/sphinxext/__init__.py
+++ b/nipype/sphinxext/__init__.py
@@ -2,4 +2,5 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-from __future__ import print_function, division, absolute_import, unicode_literals
+from __future__ import (print_function, division, absolute_import,
+                        unicode_literals)
diff --git a/nipype/sphinxext/plot_workflow.py b/nipype/sphinxext/plot_workflow.py
index 8876a6878d..c4e590bf0a 100644
--- a/nipype/sphinxext/plot_workflow.py
+++ b/nipype/sphinxext/plot_workflow.py
@@ -108,7 +108,12 @@
 """
 from __future__ import print_function, division, absolute_import, unicode_literals
 
-import sys, os, shutil, io, re, textwrap
+import sys
+import os
+import shutil
+import io
+import re
+import textwrap
 from os.path import relpath
 from errno import EEXIST
 import traceback
@@ -124,12 +129,14 @@
 try:
     # Sphinx depends on either Jinja or Jinja2
     import jinja2
+
     def format_template(template, **kw):
         return jinja2.Template(template).render(**kw)
 except ImportError as e:
     missing_imports.append(str(e))
     try:
         import jinja
+
         def format_template(template, **kw):
             return jinja.from_string(template, **kw)
         missing_imports.pop()
@@ -141,6 +148,7 @@ def format_template(template, **kw):
 PY2 = sys.version_info[0] == 2
 PY3 = sys.version_info[0] == 3
 
+
 def _mkdirp(folder):
     """
     Equivalent to bash's mkdir -p
@@ -159,13 +167,14 @@ def _mkdirp(folder):
 
 
 def wf_directive(name, arguments, options, content, lineno,
-                content_offset, block_text, state, state_machine):
+                 content_offset, block_text, state, state_machine):
     if len(missing_imports) == 0:
         return run(arguments, content, options, state_machine, state, lineno)
     else:
         raise ImportError('\n'.join(missing_imports))
 wf_directive.__doc__ = __doc__
 
+
 def _option_boolean(arg):
     if not arg or not arg.strip():
         # no argument given, assume used as a flag
@@ -177,9 +186,11 @@ def _option_boolean(arg):
     else:
         raise ValueError('"%s" unknown boolean' % arg)
 
+
 def _option_graph2use(arg):
     return directives.choice(arg, ('hierarchical', 'colored', 'flat', 'orig', 'exec'))
 
+
 def _option_context(arg):
     if arg in [None, 'reset', 'close-figs']:
         return arg
@@ -266,9 +277,9 @@ def setup(app):
     return metadata
 
 
-#------------------------------------------------------------------------------
+# ------------------------------------------------------------------------------
 # Doctest handling
-#------------------------------------------------------------------------------
+# ------------------------------------------------------------------------------
 
 def contains_doctest(text):
     try:
@@ -309,9 +320,9 @@ def remove_coding(text):
     sub_re = re.compile("^#\s*-\*-\s*coding:\s*.*-\*-$", flags=re.MULTILINE)
     return sub_re.sub("", text)
 
-#------------------------------------------------------------------------------
+# ------------------------------------------------------------------------------
 # Template
-#------------------------------------------------------------------------------
+# ------------------------------------------------------------------------------
 
 
 TEMPLATE = """
@@ -376,6 +387,7 @@ def remove_coding(text):
 # :context: option
 wf_context = dict()
 
+
 class ImageFile(object):
     def __init__(self, basename, dirname):
         self.basename = basename
@@ -473,6 +485,7 @@ def _dummy_print(*arg, **kwarg):
         sys.stdout = stdout
     return ns
 
+
 def get_wf_formats(config):
     default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200}
     formats = []
@@ -521,7 +534,7 @@ def render_figures(code, code_path, output_dir, output_base, context,
                                  graph2use=graph2use, simple_form=simple_form)
             shutil.move(src, img_path)
-        except Exception as err:
+        except Exception:
             raise GraphError(traceback.format_exc())
 
         img.formats.append(fmt)
@@ -616,7 +629,7 @@ def run(arguments, content, options, state_machine, state, lineno):
     dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                             source_rel_dir))
     if not os.path.exists(dest_dir):
-        os.makedirs(dest_dir) # no problem here for me, but just use built-ins
+        os.makedirs(dest_dir)  # no problem here for me, but just use built-ins
 
     # how to link to files from the RST file
     dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
@@ -646,8 +659,8 @@ def run(arguments, content, options, state_machine, state, lineno):
         except GraphError as err:
             reporter = state.memo.reporter
             sm = reporter.system_message(
-                2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
-                                                                         source_file_name, err),
+                2, "Exception occurred in plotting %s\n from %s:\n%s" % (
+                    output_base, source_file_name, err),
                 line=lineno)
             results = [(code, [])]
             errors = [sm]
diff --git a/nipype/testing/decorators.py b/nipype/testing/decorators.py
index 35de0cbf00..3cb6a1f341 100644
--- a/nipype/testing/decorators.py
+++ b/nipype/testing/decorators.py
@@ -5,7 +5,7 @@
 Extend numpy's decorators to use nipype's gui and data labels.
 """
 
-from numpy.testing.decorators import *
+from numpy.testing.decorators import knownfailureif, skipif
 
 from nibabel.data import DataError
 
@@ -17,10 +17,10 @@ def make_label_dec(label, ds=None):
     ----------
     label : str or sequence
         One or more labels that will be applied by the decorator to the
-        functions it decorates.  Labels are attributes of the decorated function
+        functions it decorates. Labels are attributes of the decorated function
         with their value set to True.
     ds : str
-        An optional docstring for the resulting decorator.  If not given, a
+        An optional docstring for the resulting decorator. If not given, a
         default docstring is auto-generated.
 
     Returns
diff --git a/nipype/testing/fixtures.py b/nipype/testing/fixtures.py
index 550346d1db..390f76feb9 100644
--- a/nipype/testing/fixtures.py
+++ b/nipype/testing/fixtures.py
@@ -5,7 +5,8 @@
 """
 Pytest fixtures used in tests.
""" -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) import os @@ -41,7 +42,7 @@ def nifti_image_files(outdir, filelist, shape): def create_files_in_directory(request, tmpdir): cwd = tmpdir.chdir() filelist = ['a.nii', 'b.nii'] - nifti_image_files(tmpdir.strpath, filelist, shape=(3,3,3,4)) + nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): cwd.chdir() @@ -67,7 +68,7 @@ def change_directory(): def create_files_in_directory_plus_dummy_file(request, tmpdir): cwd = tmpdir.chdir() filelist = ['a.nii', 'b.nii'] - nifti_image_files(tmpdir.strpath, filelist, shape=(3,3,3,4)) + nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) tmpdir.join('reg.dat').write('dummy file') filelist.append('reg.dat') @@ -103,12 +104,13 @@ def set_output_type(fsl_output_type): FSLCommand.set_default_output_type(Info.output_type()) return prev_output_type + @pytest.fixture(params=[None]+list(Info.ftypes)) def create_files_in_directory_plus_output_type(request, tmpdir): func_prev_type = set_output_type(request.param) origdir = tmpdir.chdir() filelist = ['a.nii', 'b.nii'] - nifti_image_files(tmpdir.strpath, filelist, shape=(3,3,3,4)) + nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) out_ext = Info.output_type_to_ext(Info.output_type()) diff --git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index 838c3d167a..bfd9c12d2f 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -20,8 +20,9 @@ def test_tempfatfs(): with fatfs as tmp_dir: assert os.path.exists(tmp_dir) + @patch('subprocess.check_call', MagicMock( - side_effect=subprocess.CalledProcessError('',''))) + side_effect=subprocess.CalledProcessError('', ''))) def test_tempfatfs_calledprocesserror(): try: TempFATFS() @@ -31,6 +32,7 @@ def test_tempfatfs_calledprocesserror(): else: assert False + @patch('subprocess.check_call', MagicMock()) @patch('subprocess.Popen', MagicMock(side_effect=OSError())) def test_tempfatfs_oserror(): diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index 7c03ca6d04..667edfe53c 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -3,7 +3,8 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Additional handy utilities for testing """ -from __future__ import print_function, division, unicode_literals, absolute_import +from __future__ import (print_function, division, unicode_literals, + absolute_import) from builtins import range, object, open import os @@ -93,6 +94,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.dev_null.close() shutil.rmtree(self.tmpdir) + def save_toy_nii(ndarray, filename): toy = nb.Nifti1Image(ndarray, np.eye(4)) nb.nifti1.save(toy, filename) diff --git a/nipype/tests/test_nipype.py b/nipype/tests/test_nipype.py index 05aa1abf86..8eabc94bcd 100644 --- a/nipype/tests/test_nipype.py +++ b/nipype/tests/test_nipype.py @@ -1,13 +1,13 @@ from .. 
 
+
 def test_nipype_info():
     exception_not_raised = True
     try:
         get_info()
-    except Exception as e:
+    except Exception:
         exception_not_raised = False
     assert exception_not_raised
 
 # def test_fail_always():
 #     assert False
-
diff --git a/nipype/utils/config.py b/nipype/utils/config.py
index c02be71f64..a3dc0c4015 100644
--- a/nipype/utils/config.py
+++ b/nipype/utils/config.py
@@ -9,7 +9,8 @@
 
 @author: Chris Filo Gorgolewski
 '''
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 import os
 import sys
 import errno
@@ -109,10 +110,12 @@ def __init__(self, *args, **kwargs):
         for option in CONFIG_DEPRECATIONS:
             for section in ['execution', 'logging', 'monitoring']:
                 if self.has_option(section, option):
-                    new_section, new_option = CONFIG_DEPRECATIONS[option][0].split('.')
+                    new_section, new_option = CONFIG_DEPRECATIONS[
+                        option][0].split('.')
                     if not self.has_option(new_section, new_option):
                         # Warn implicit in get
-                        self.set(new_section, new_option, self.get(section, option))
+                        self.set(new_section, new_option, self.get(
+                            section, option))
 
     @property
     def cwd(self):
@@ -160,9 +163,10 @@ def set_log_dir(self, log_dir):
     def get(self, section, option, default=None):
         """Get an option"""
         if option in CONFIG_DEPRECATIONS:
-            msg = ('Config option "%s" has been deprecated as of nipype %s. Please use '
-                   '"%s" instead.') % (option, CONFIG_DEPRECATIONS[option][1],
-                                       CONFIG_DEPRECATIONS[option][0])
+            msg = ('Config option "%s" has been deprecated as of nipype %s. '
+                   'Please use "%s" instead.') % (
+                       option, CONFIG_DEPRECATIONS[option][1],
+                       CONFIG_DEPRECATIONS[option][0])
             warn(msg)
             section, option = CONFIG_DEPRECATIONS[option][0].split('.')
 
@@ -176,9 +180,10 @@ def set(self, section, option, value):
             value = str(value)
 
         if option in CONFIG_DEPRECATIONS:
-            msg = ('Config option "%s" has been deprecated as of nipype %s. Please use '
-                   '"%s" instead.') % (option, CONFIG_DEPRECATIONS[option][1],
-                                       CONFIG_DEPRECATIONS[option][0])
+            msg = ('Config option "%s" has been deprecated as of nipype %s. '
+                   'Please use "%s" instead.') % (
+                       option, CONFIG_DEPRECATIONS[option][1],
+                       CONFIG_DEPRECATIONS[option][0])
             warn(msg)
             section, option = CONFIG_DEPRECATIONS[option][0].split('.')
 
@@ -262,7 +267,8 @@ def resource_monitor(self, value):
             self._resource_monitor = False
         elif value is True:
             if not self._resource_monitor:
-                # Before setting self._resource_monitor check psutil availability
+                # Before setting self._resource_monitor check psutil
+                # availability
                 self._resource_monitor = False
                 try:
                     import psutil
@@ -272,8 +278,8 @@ def resource_monitor(self, value):
                     pass
                 finally:
                     if not self._resource_monitor:
-                        warn('Could not enable the resource monitor: psutil>=5.0'
-                             ' could not be imported.')
+                        warn('Could not enable the resource monitor: '
+                             'psutil>=5.0 could not be imported.')
 
         self._config.set('monitoring', 'enabled',
                          ('%s' % self._resource_monitor).lower())
@@ -326,8 +332,8 @@ def _mock():
                 from xvfbwrapper import Xvfb
             except ImportError:
                 raise RuntimeError(
-                    'A display server was required, but $DISPLAY is not defined '
-                    'and Xvfb could not be imported.')
+                    'A display server was required, but $DISPLAY is not '
+                    'defined and Xvfb could not be imported.')
 
             self._display = Xvfb(nolisten='tcp')
             self._display.start()
@@ -344,7 +350,8 @@ def stop_display(self):
         if self._display is not None:
             from .. import logging
             self._display.stop()
-            logging.getLogger('interface').debug('Closing display (if virtual)')
+            logging.getLogger('interface').debug(
+                'Closing display (if virtual)')
 
 
 @atexit.register
diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py
index 0d6bce7d45..ee1c4597e1 100644
--- a/nipype/utils/docparse.py
+++ b/nipype/utils/docparse.py
@@ -13,7 +13,8 @@
     docstring = docparse.get_doc(better.cmd, better.opt_map)
 """
 
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 from builtins import str, open, bytes
 
 import subprocess
diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py
index 8731aa32eb..cfb33900f7 100644
--- a/nipype/utils/draw_gantt_chart.py
+++ b/nipype/utils/draw_gantt_chart.py
@@ -5,7 +5,8 @@
 Module to draw an html gantt chart from logfile produced by
 ``nipype.utils.profiler.log_nodes_cb()``
 """
-from __future__ import print_function, division, unicode_literals, absolute_import
+from __future__ import (print_function, division, unicode_literals,
+                        absolute_import)
 
 # Import packages
 import sys
@@ -22,12 +23,13 @@
 try:
     import pandas as pd
 except ImportError:
-    print('Pandas not found; in order for full functionality of this module '\
+    print('Pandas not found; in order for full functionality of this module '
           'install the pandas package')
     pass
 
 PY3 = sys.version_info[0] > 2
 
+
 def create_event_dict(start_time, nodes_list):
     '''
     Function to generate a dictionary of event (start/finish) nodes
@@ -248,7 +250,7 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes,
     scale = space_between_minutes / minute_scale
     space_between_minutes = space_between_minutes / scale
     end_times = [datetime.datetime(start.year, start.month, start.day,
-                                   start.hour, start.minute, start.second) \
+                                   start.hour, start.minute, start.second)
                  for core in range(cores)]
 
     # For each node in the pipeline
@@ -258,7 +260,7 @@
         node_finish = node['finish']
         # Calculate an offset and scale duration
        offset = ((node_start - start).total_seconds() / 60) * scale * \
-                 space_between_minutes + 220
+            space_between_minutes + 220
         # Scale duration
         scale_duration = (node['duration'] / 60) * scale * space_between_minutes
         if scale_duration < 5:
@@ -283,14 +285,14 @@
             color = 'red'
 
         # Setup dictionary for node html string insertion
-        node_dict = {'left' : left,
-                     'offset' : offset,
-                     'scale_duration' : scale_duration,
-                     'color' : color,
-                     'node_name' : node['name'],
-                     'node_dur' : node['duration'] / 60.0,
-                     'node_start' : node_start.strftime("%Y-%m-%d %H:%M:%S"),
-                     'node_finish' : node_finish.strftime("%Y-%m-%d %H:%M:%S")}
+        node_dict = {'left': left,
+                     'offset': offset,
+                     'scale_duration': scale_duration,
+                     'color': color,
+                     'node_name': node['name'],
+                     'node_dur': node['duration'] / 60.0,
+                     'node_start': node_start.strftime("%Y-%m-%d %H:%M:%S"),
+                     'node_finish': node_finish.strftime("%Y-%m-%d %H:%M:%S")}
 
         # Create new node string
         new_node = "